loader/dri3: Unify the style of function pointer calls in structs
[mesa.git] / src/loader/loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include "loader_dri3_helper.h"
36
37 /* From xmlpool/options.h, user exposed so should be stable */
38 #define DRI_CONF_VBLANK_NEVER 0
39 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
41 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
42
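/* Helpers for the per-buffer synchronization fence, which is shared with the
 * X server both as an xshmfence and as an XCB Sync fence: reset it before
 * queuing server-side work on the buffer, trigger it (directly, or via a Sync
 * request the server executes after the preceding work) when the buffer is
 * ready, and await it before touching the buffer again.
 */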
43 static inline void
44 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
45 {
46 xshmfence_reset(buffer->shm_fence);
47 }
48
49 static inline void
50 dri3_fence_set(struct loader_dri3_buffer *buffer)
51 {
52 xshmfence_trigger(buffer->shm_fence);
53 }
54
55 static inline void
56 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
57 {
58 xcb_sync_trigger_fence(c, buffer->sync_fence);
59 }
60
61 static inline void
62 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
63 {
64 xcb_flush(c);
65 xshmfence_await(buffer->shm_fence);
66 }
67
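/* With page flipping the X server keeps one extra buffer pinned for scanout,
 * so use three back buffers instead of two to avoid stalling.
 */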
68 static void
69 dri3_update_num_back(struct loader_dri3_drawable *draw)
70 {
71 if (draw->flipping)
72 draw->num_back = 3;
73 else
74 draw->num_back = 2;
75 }
76
77 void
78 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
79 {
80 interval = draw->vtable->clamp_swap_interval(draw, interval);
81 draw->vtable->set_swap_interval(draw, interval);
82 dri3_update_num_back(draw);
83 }
84
85 /** dri3_free_render_buffer
86 *
87 * Free everything associated with one render buffer, including the pixmap,
88 * the fences and the driver image
89 */
90 static void
91 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
92 struct loader_dri3_buffer *buffer)
93 {
94 if (buffer->own_pixmap)
95 xcb_free_pixmap(draw->conn, buffer->pixmap);
96 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
97 xshmfence_unmap_shm(buffer->shm_fence);
98 draw->ext->image->destroyImage(buffer->image);
99 if (buffer->linear_buffer)
100 draw->ext->image->destroyImage(buffer->linear_buffer);
101 free(buffer);
102 }
103
104 void
105 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
106 {
107 int i;
108
109 draw->ext->core->destroyDrawable(draw->dri_drawable);
110
111 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
112 if (draw->buffers[i])
113 dri3_free_render_buffer(draw, draw->buffers[i]);
114 }
115
116 if (draw->special_event) {
117 xcb_void_cookie_t cookie =
118 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
119 XCB_PRESENT_EVENT_MASK_NO_EVENT);
120
121 xcb_discard_reply(draw->conn, cookie.sequence);
122 xcb_unregister_for_special_event(draw->conn, draw->special_event);
123 }
124 }
125
126 int
127 loader_dri3_drawable_init(xcb_connection_t *conn,
128 xcb_drawable_t drawable,
129 __DRIscreen *dri_screen,
130 bool is_different_gpu,
131 const __DRIconfig *dri_config,
132 struct loader_dri3_extensions *ext,
133 const struct loader_dri3_vtable *vtable,
134 struct loader_dri3_drawable *draw)
135 {
136 xcb_get_geometry_cookie_t cookie;
137 xcb_get_geometry_reply_t *reply;
138 xcb_generic_error_t *error;
139 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
140 int swap_interval;
141
142 draw->conn = conn;
143 draw->ext = ext;
144 draw->vtable = vtable;
145 draw->drawable = drawable;
146 draw->dri_screen = dri_screen;
147 draw->is_different_gpu = is_different_gpu;
148
149 draw->have_back = 0;
150 draw->have_fake_front = 0;
151 draw->first_init = true;
152
153 if (draw->ext->config)
154 draw->ext->config->configQueryi(draw->dri_screen,
155 "vblank_mode", &vblank_mode);
156
157 switch (vblank_mode) {
158 case DRI_CONF_VBLANK_NEVER:
159 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
160 swap_interval = 0;
161 break;
162 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
163 case DRI_CONF_VBLANK_ALWAYS_SYNC:
164 default:
165 swap_interval = 1;
166 break;
167 }
168 draw->vtable->set_swap_interval(draw, swap_interval);
169
170 dri3_update_num_back(draw);
171
172 /* Create a new drawable */
173 draw->dri_drawable =
174 draw->ext->image_driver->createNewDrawable(dri_screen,
175 dri_config,
176 draw);
177
178 if (!draw->dri_drawable)
179 return 1;
180
181 cookie = xcb_get_geometry(draw->conn, draw->drawable);
182 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
183 if (reply == NULL || error != NULL) {
184 draw->ext->core->destroyDrawable(draw->dri_drawable);
185 return 1;
186 }
187
188 draw->width = reply->width;
189 draw->height = reply->height;
190 draw->depth = reply->depth;
191 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
192 free(reply);
193
194 /*
195 * Make sure the server has the same swap interval we do for the new
196 * drawable.
197 */
198 loader_dri3_set_swap_interval(draw, swap_interval);
199
200 return 0;
201 }
202
203 /*
204 * Process one Present event
205 */
206 static void
207 dri3_handle_present_event(struct loader_dri3_drawable *draw,
208 xcb_present_generic_event_t *ge)
209 {
210 switch (ge->evtype) {
211 case XCB_PRESENT_CONFIGURE_NOTIFY: {
212 xcb_present_configure_notify_event_t *ce = (void *) ge;
213
214 draw->width = ce->width;
215 draw->height = ce->height;
216 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
217 break;
218 }
219 case XCB_PRESENT_COMPLETE_NOTIFY: {
220 xcb_present_complete_notify_event_t *ce = (void *) ge;
221
222 /* Compute the processed SBC number from the received 32-bit serial number
223 * merged with the upper 32-bits of the sent 64-bit serial number while
224 * checking for wrap.
225 */
226 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
227 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
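/* e.g. send_sbc = 0x100000005 and serial = 0xfffffffe: the merged value
 * 0x1fffffffe overshoots send_sbc, so the serial belongs to the previous
 * 32-bit wrap period and 2^32 is subtracted below.
 */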
228 if (draw->recv_sbc > draw->send_sbc)
229 draw->recv_sbc -= 0x100000000;
230 switch (ce->mode) {
231 case XCB_PRESENT_COMPLETE_MODE_FLIP:
232 draw->flipping = true;
233 break;
234 case XCB_PRESENT_COMPLETE_MODE_COPY:
235 draw->flipping = false;
236 break;
237 }
238 dri3_update_num_back(draw);
239
240 if (draw->vtable->show_fps)
241 draw->vtable->show_fps(draw, ce->ust);
242
243 draw->ust = ce->ust;
244 draw->msc = ce->msc;
245 } else {
246 draw->recv_msc_serial = ce->serial;
247 draw->notify_ust = ce->ust;
248 draw->notify_msc = ce->msc;
249 }
250 break;
251 }
252 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
253 xcb_present_idle_notify_event_t *ie = (void *) ge;
254 int b;
255
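/* An idle notification means the X server is done with the pixmap; mark the
 * matching buffer reusable, and free back buffers beyond the currently
 * needed num_back instead of keeping them around.
 */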
256 for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
257 struct loader_dri3_buffer *buf = draw->buffers[b];
258
259 if (buf && buf->pixmap == ie->pixmap) {
260 buf->busy = 0;
261 if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
262 dri3_free_render_buffer(draw, buf);
263 draw->buffers[b] = NULL;
264 }
265 break;
266 }
267 }
268 break;
269 }
270 }
271 free(ge);
272 }
273
274 static bool
275 dri3_wait_for_event(struct loader_dri3_drawable *draw)
276 {
277 xcb_generic_event_t *ev;
278 xcb_present_generic_event_t *ge;
279
280 xcb_flush(draw->conn);
281 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
282 if (!ev)
283 return false;
284 ge = (void *) ev;
285 dri3_handle_present_event(draw, ge);
286 return true;
287 }
288
289 /** loader_dri3_wait_for_msc
290 *
291 * Get the X server to send an event when the target msc/divisor/remainder is
292 * reached.
293 */
294 bool
295 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
296 int64_t target_msc,
297 int64_t divisor, int64_t remainder,
298 int64_t *ust, int64_t *msc, int64_t *sbc)
299 {
300 uint32_t msc_serial;
301
302 msc_serial = ++draw->send_msc_serial;
303 xcb_present_notify_msc(draw->conn,
304 draw->drawable,
305 msc_serial,
306 target_msc,
307 divisor,
308 remainder);
309
310 xcb_flush(draw->conn);
311
312 /* Wait for the event */
313 if (draw->special_event) {
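/* The signed 32-bit difference keeps this comparison correct across
 * serial-number wraparound.
 */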
314 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
315 if (!dri3_wait_for_event(draw))
316 return false;
317 }
318 }
319
320 *ust = draw->notify_ust;
321 *msc = draw->notify_msc;
322 *sbc = draw->recv_sbc;
323
324 return true;
325 }
326
327 /** loader_dri3_wait_for_sbc
328 *
329 * Wait for the completed swap buffer count to reach the specified
330 * target. Presumably the application knows that this will be reached with
331 * outstanding complete events, or we're going to be here awhile.
332 */
333 int
334 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
335 int64_t target_sbc, int64_t *ust,
336 int64_t *msc, int64_t *sbc)
337 {
338 /* From the GLX_OML_sync_control spec:
339 *
340 * "If <target_sbc> = 0, the function will block until all previous
341 * swaps requested with glXSwapBuffersMscOML for that window have
342 * completed."
343 */
344 if (!target_sbc)
345 target_sbc = draw->send_sbc;
346
347 while (draw->recv_sbc < target_sbc) {
348 if (!dri3_wait_for_event(draw))
349 return 0;
350 }
351
352 *ust = draw->ust;
353 *msc = draw->msc;
354 *sbc = draw->recv_sbc;
355 return 1;
356 }
357
358 /** dri3_find_back
359 *
360 * Find an idle back buffer. If there isn't one, then
361 * wait for a present idle notify event from the X server
362 */
363 static int
364 dri3_find_back(struct loader_dri3_drawable *draw)
365 {
366 int b;
367 xcb_generic_event_t *ev;
368 xcb_present_generic_event_t *ge;
369
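/* Scan for an idle back buffer starting at cur_back; if they are all busy,
 * block on the special event queue (waiting for an idle or complete notify)
 * and try again.
 */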
370 for (;;) {
371 for (b = 0; b < draw->num_back; b++) {
372 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
373 struct loader_dri3_buffer *buffer = draw->buffers[id];
374
375 if (!buffer || !buffer->busy) {
376 draw->cur_back = id;
377 return id;
378 }
379 }
380 xcb_flush(draw->conn);
381 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
382 if (!ev)
383 return -1;
384 ge = (void *) ev;
385 dri3_handle_present_event(draw, ge);
386 }
387 }
388
389 static xcb_gcontext_t
390 dri3_drawable_gc(struct loader_dri3_drawable *draw)
391 {
392 if (!draw->gc) {
393 uint32_t v = 0;
394 xcb_create_gc(draw->conn,
395 (draw->gc = xcb_generate_id(draw->conn)),
396 draw->drawable,
397 XCB_GC_GRAPHICS_EXPOSURES,
398 &v);
399 }
400 return draw->gc;
401 }
402
403
404 static struct loader_dri3_buffer *
405 dri3_back_buffer(struct loader_dri3_drawable *draw)
406 {
407 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
408 }
409
410 static struct loader_dri3_buffer *
411 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
412 {
413 return draw->buffers[LOADER_DRI3_FRONT_ID];
414 }
415
416 static void
417 dri3_copy_area(xcb_connection_t *c,
418 xcb_drawable_t src_drawable,
419 xcb_drawable_t dst_drawable,
420 xcb_gcontext_t gc,
421 int16_t src_x,
422 int16_t src_y,
423 int16_t dst_x,
424 int16_t dst_y,
425 uint16_t width,
426 uint16_t height)
427 {
428 xcb_void_cookie_t cookie;
429
430 cookie = xcb_copy_area_checked(c,
431 src_drawable,
432 dst_drawable,
433 gc,
434 src_x,
435 src_y,
436 dst_x,
437 dst_y,
438 width,
439 height);
440 xcb_discard_reply(c, cookie.sequence);
441 }
442
443 /**
444 * Asks the driver to flush any queued work necessary for serializing with the
445 * X command stream, and optionally the slightly more strict requirement of
446 * glFlush() equivalence (which would require flushing even if nothing had
447 * been drawn to a window system framebuffer, for example).
448 */
449 void
450 loader_dri3_flush(struct loader_dri3_drawable *draw,
451 unsigned flags,
452 enum __DRI2throttleReason throttle_reason)
453 {
454    /* get_dri_context() may return NULL; only flush when a context is bound */
455 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
456
457 if (dri_context) {
458 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
459 flags, throttle_reason);
460 }
461 }
462
463 void
464 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
465 int x, int y,
466 int width, int height,
467 bool flush)
468 {
469 struct loader_dri3_buffer *back;
470 unsigned flags = __DRI2_FLUSH_DRAWABLE;
471 __DRIcontext *dri_context;
472
473 dri_context = draw->vtable->get_dri_context(draw);
474
475 /* Check we have the right attachments */
476 if (!draw->have_back || draw->is_pixmap)
477 return;
478
479 if (flush)
480 flags |= __DRI2_FLUSH_CONTEXT;
481 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
482
483 back = dri3_back_buffer(draw);
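/* Convert from GL's bottom-left origin to X11's top-left origin. */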
484 y = draw->height - y - height;
485
486 if (draw->is_different_gpu && draw->vtable->in_current_context(draw)) {
487 /* Update the linear buffer part of the back buffer
488 * for the dri3_copy_area operation
489 */
490 draw->ext->image->blitImage(dri_context,
491 back->linear_buffer,
492 back->image,
493 0, 0, back->width,
494 back->height,
495 0, 0, back->width,
496 back->height, __BLIT_FLAG_FLUSH);
497       /* Use blitImage to update the fake front as well.
498        */
499 if (draw->have_fake_front)
500 draw->ext->image->blitImage(dri_context,
501 dri3_fake_front_buffer(draw)->image,
502 back->image,
503 x, y, width, height,
504 x, y, width, height, __BLIT_FLAG_FLUSH);
505 }
506
507 dri3_fence_reset(draw->conn, back);
508 dri3_copy_area(draw->conn,
509 dri3_back_buffer(draw)->pixmap,
510 draw->drawable,
511 dri3_drawable_gc(draw),
512 x, y, x, y, width, height);
513 dri3_fence_trigger(draw->conn, back);
514 /* Refresh the fake front (if present) after we just damaged the real
515 * front.
516 */
517 if (draw->have_fake_front && !draw->is_different_gpu) {
518 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
519 dri3_copy_area(draw->conn,
520 dri3_back_buffer(draw)->pixmap,
521 dri3_fake_front_buffer(draw)->pixmap,
522 dri3_drawable_gc(draw),
523 x, y, x, y, width, height);
524 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
525 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
526 }
527 dri3_fence_await(draw->conn, back);
528 }
529
530 void
531 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
532 xcb_drawable_t dest,
533 xcb_drawable_t src)
534 {
535 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
536
537 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
538 dri3_copy_area(draw->conn,
539 src, dest,
540 dri3_drawable_gc(draw),
541 0, 0, 0, 0, draw->width, draw->height);
542 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
543 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
544 }
545
546 void
547 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
548 {
549 struct loader_dri3_buffer *front;
550 __DRIcontext *dri_context;
551
552 if (draw == NULL || !draw->have_fake_front)
553 return;
554
555 front = dri3_fake_front_buffer(draw);
556 dri_context = draw->vtable->get_dri_context(draw);
557
558 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
559
560    /* In the draw->is_different_gpu case, the linear buffer has been updated,
561 * but not yet the tiled buffer.
562 * Copy back to the tiled buffer we use for rendering.
563 * Note that we don't need flushing.
564 */
565 if (draw->is_different_gpu && draw->vtable->in_current_context(draw))
566 draw->ext->image->blitImage(dri_context,
567 front->image,
568 front->linear_buffer,
569 0, 0, front->width,
570 front->height,
571 0, 0, front->width,
572 front->height, 0);
573 }
574
575 void
576 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
577 {
578 struct loader_dri3_buffer *front;
579 __DRIcontext *dri_context;
580
581 if (draw == NULL || !draw->have_fake_front)
582 return;
583
584 front = dri3_fake_front_buffer(draw);
585 dri_context = draw->vtable->get_dri_context(draw);
586
587    /* In the draw->is_different_gpu case, we update the linear_buffer
588 * before updating the real front.
589 */
590 if (draw->is_different_gpu && draw->vtable->in_current_context(draw))
591 draw->ext->image->blitImage(dri_context,
592 front->linear_buffer,
593 front->image,
594 0, 0, front->width,
595 front->height,
596 0, 0, front->width,
597 front->height, __BLIT_FLAG_FLUSH);
598 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
599 }
600
601 /** dri3_flush_present_events
602 *
603 * Process any present events that have been received from the X server
604 */
605 static void
606 dri3_flush_present_events(struct loader_dri3_drawable *draw)
607 {
608 /* Check to see if any configuration changes have occurred
609 * since we were last invoked
610 */
611 if (draw->special_event) {
612 xcb_generic_event_t *ev;
613
614 while ((ev = xcb_poll_for_special_event(draw->conn,
615 draw->special_event)) != NULL) {
616 xcb_present_generic_event_t *ge = (void *) ev;
617 dri3_handle_present_event(draw, ge);
618 }
619 }
620 }
621
622 /** loader_dri3_swap_buffers_msc
623 *
624 * Make the current back buffer visible using the present extension
625 */
626 int64_t
627 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
628 int64_t target_msc, int64_t divisor,
629 int64_t remainder, unsigned flush_flags,
630 bool force_copy)
631 {
632 struct loader_dri3_buffer *back;
633 __DRIcontext *dri_context;
634 int64_t ret = 0;
635 uint32_t options = XCB_PRESENT_OPTION_NONE;
636 int swap_interval;
637
638 dri_context = draw->vtable->get_dri_context(draw);
639 swap_interval = draw->vtable->get_swap_interval(draw);
640
641 draw->vtable->flush_drawable(draw, flush_flags);
642
643 back = draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
644 if (draw->is_different_gpu && back) {
645 /* Update the linear buffer before presenting the pixmap */
646 draw->ext->image->blitImage(dri_context,
647 back->linear_buffer,
648 back->image,
649 0, 0, back->width,
650 back->height,
651 0, 0, back->width,
652 back->height, __BLIT_FLAG_FLUSH);
653 /* Update the fake front */
654 if (draw->have_fake_front)
655 draw->ext->image->blitImage(dri_context,
656 draw->buffers[LOADER_DRI3_FRONT_ID]->image,
657 back->image,
658 0, 0, draw->width, draw->height,
659 0, 0, draw->width, draw->height,
660 __BLIT_FLAG_FLUSH);
661 }
662
663 dri3_flush_present_events(draw);
664
665 if (back && !draw->is_pixmap) {
666 dri3_fence_reset(draw->conn, back);
667
668 /* Compute when we want the frame shown by taking the last known
669 * successful MSC and adding in a swap interval for each outstanding swap
670 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
671       * semantics"
672 */
673 ++draw->send_sbc;
674 if (target_msc == 0 && divisor == 0 && remainder == 0)
675 target_msc = draw->msc + swap_interval *
676 (draw->send_sbc - draw->recv_sbc);
677 else if (divisor == 0 && remainder > 0) {
678 /* From the GLX_OML_sync_control spec:
679 * "If <divisor> = 0, the swap will occur when MSC becomes
680 * greater than or equal to <target_msc>."
681 *
682 * Note that there's no mention of the remainder. The Present
683 * extension throws BadValue for remainder != 0 with divisor == 0, so
684 * just drop the passed in value.
685 */
686 remainder = 0;
687 }
688
689 /* From the GLX_EXT_swap_control spec
690 * and the EGL 1.4 spec (page 53):
691 *
692 * "If <interval> is set to a value of 0, buffer swaps are not
693 * synchronized to a video frame."
694 *
695 * Implementation note: It is possible to enable triple buffering
696 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
697 * the default.
698 */
699 if (swap_interval == 0)
700 options |= XCB_PRESENT_OPTION_ASYNC;
701 if (force_copy)
702 options |= XCB_PRESENT_OPTION_COPY;
703
704 back->busy = 1;
705 back->last_swap = draw->send_sbc;
706 xcb_present_pixmap(draw->conn,
707 draw->drawable,
708 back->pixmap,
709 (uint32_t) draw->send_sbc,
710 0, /* valid */
711 0, /* update */
712 0, /* x_off */
713 0, /* y_off */
714 None, /* target_crtc */
715 None,
716 back->sync_fence,
717 options,
718 target_msc,
719 divisor,
720 remainder, 0, NULL);
721 ret = (int64_t) draw->send_sbc;
722
723 /* If there's a fake front, then copy the source back buffer
724 * to the fake front to keep it up to date. This needs
725 * to reset the fence and make future users block until
726 * the X server is done copying the bits
727 */
728 if (draw->have_fake_front && !draw->is_different_gpu) {
729 dri3_fence_reset(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
730 dri3_copy_area(draw->conn,
731 back->pixmap,
732 draw->buffers[LOADER_DRI3_FRONT_ID]->pixmap,
733 dri3_drawable_gc(draw),
734 0, 0, 0, 0,
735 draw->width, draw->height);
736 dri3_fence_trigger(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
737 }
738 xcb_flush(draw->conn);
739 if (draw->stamp)
740 ++(*draw->stamp);
741 }
742
743 draw->ext->flush->invalidate(draw->dri_drawable);
744
745 return ret;
746 }
747
748 int
749 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
750 {
751 int back_id = LOADER_DRI3_BACK_ID(dri3_find_back(draw));
752
753 if (back_id < 0 || !draw->buffers[back_id])
754 return 0;
755
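/* Following EXT_buffer_age semantics: 0 means undefined contents, 1 means
 * the buffer holds the most recently presented frame, and so on.
 */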
756 if (draw->buffers[back_id]->last_swap != 0)
757 return draw->send_sbc - draw->buffers[back_id]->last_swap + 1;
758 else
759 return 0;
760 }
761
762 /** loader_dri3_open
763 *
764 * Wrapper around xcb_dri3_open
765 */
766 int
767 loader_dri3_open(xcb_connection_t *conn,
768 xcb_window_t root,
769 uint32_t provider)
770 {
771 xcb_dri3_open_cookie_t cookie;
772 xcb_dri3_open_reply_t *reply;
773 int fd;
774
775 cookie = xcb_dri3_open(conn,
776 root,
777 provider);
778
779 reply = xcb_dri3_open_reply(conn, cookie, NULL);
780 if (!reply)
781 return -1;
782
783 if (reply->nfd != 1) {
784 free(reply);
785 return -1;
786 }
787
788 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
789 free(reply);
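/* Keep the DRM fd from leaking across exec(). */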
790 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
791
792 return fd;
793 }
794
795 static uint32_t
796 dri3_cpp_for_format(uint32_t format) {
797 switch (format) {
798 case __DRI_IMAGE_FORMAT_R8:
799 return 1;
800 case __DRI_IMAGE_FORMAT_RGB565:
801 case __DRI_IMAGE_FORMAT_GR88:
802 return 2;
803 case __DRI_IMAGE_FORMAT_XRGB8888:
804 case __DRI_IMAGE_FORMAT_ARGB8888:
805 case __DRI_IMAGE_FORMAT_ABGR8888:
806 case __DRI_IMAGE_FORMAT_XBGR8888:
807 case __DRI_IMAGE_FORMAT_XRGB2101010:
808 case __DRI_IMAGE_FORMAT_ARGB2101010:
809 case __DRI_IMAGE_FORMAT_SARGB8:
810 return 4;
811 case __DRI_IMAGE_FORMAT_NONE:
812 default:
813 return 0;
814 }
815 }
816
817 /** dri3_alloc_render_buffer
818 *
819 * Use the driver createImage function to construct a __DRIimage, then
820 * get a file descriptor for that and create an X pixmap from that
821 *
822 * Allocate an xshmfence for synchronization
823 */
824 static struct loader_dri3_buffer *
825 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
826 int width, int height, int depth)
827 {
828 struct loader_dri3_buffer *buffer;
829 __DRIimage *pixmap_buffer;
830 xcb_pixmap_t pixmap;
831 xcb_sync_fence_t sync_fence;
832 struct xshmfence *shm_fence;
833 int buffer_fd, fence_fd;
834 int stride;
835
836 /* Create an xshmfence object and
837 * prepare to send that to the X server
838 */
839
840 fence_fd = xshmfence_alloc_shm();
841 if (fence_fd < 0)
842 return NULL;
843
844 shm_fence = xshmfence_map_shm(fence_fd);
845 if (shm_fence == NULL)
846 goto no_shm_fence;
847
848 /* Allocate the image from the driver
849 */
850 buffer = calloc(1, sizeof *buffer);
851 if (!buffer)
852 goto no_buffer;
853
854 buffer->cpp = dri3_cpp_for_format(format);
855 if (!buffer->cpp)
856 goto no_image;
857
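/* On the same GPU a single shareable, scanout-capable image is enough. For
 * PRIME (different GPU) allocate a device-local image for rendering plus a
 * linear image the display GPU can consume, and share the linear one with
 * the X server.
 */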
858 if (!draw->is_different_gpu) {
859 buffer->image = draw->ext->image->createImage(draw->dri_screen,
860 width, height,
861 format,
862 __DRI_IMAGE_USE_SHARE |
863 __DRI_IMAGE_USE_SCANOUT |
864 __DRI_IMAGE_USE_BACKBUFFER,
865 buffer);
866 pixmap_buffer = buffer->image;
867
868 if (!buffer->image)
869 goto no_image;
870 } else {
871 buffer->image = draw->ext->image->createImage(draw->dri_screen,
872 width, height,
873 format,
874 0,
875 buffer);
876
877 if (!buffer->image)
878 goto no_image;
879
880 buffer->linear_buffer =
881 draw->ext->image->createImage(draw->dri_screen,
882 width, height, format,
883 __DRI_IMAGE_USE_SHARE |
884 __DRI_IMAGE_USE_LINEAR |
885 __DRI_IMAGE_USE_BACKBUFFER,
886 buffer);
887 pixmap_buffer = buffer->linear_buffer;
888
889 if (!buffer->linear_buffer)
890 goto no_linear_buffer;
891 }
892
893 /* X wants the stride, so ask the image for it
894 */
895 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
896 &stride))
897 goto no_buffer_attrib;
898
899 buffer->pitch = stride;
900
901 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
902 &buffer_fd))
903 goto no_buffer_attrib;
904
905 xcb_dri3_pixmap_from_buffer(draw->conn,
906 (pixmap = xcb_generate_id(draw->conn)),
907 draw->drawable,
908 buffer->size,
909 width, height, buffer->pitch,
910 depth, buffer->cpp * 8,
911 buffer_fd);
912
913 xcb_dri3_fence_from_fd(draw->conn,
914 pixmap,
915 (sync_fence = xcb_generate_id(draw->conn)),
916 false,
917 fence_fd);
918
919 buffer->pixmap = pixmap;
920 buffer->own_pixmap = true;
921 buffer->sync_fence = sync_fence;
922 buffer->shm_fence = shm_fence;
923 buffer->width = width;
924 buffer->height = height;
925
926 /* Mark the buffer as idle
927 */
928 dri3_fence_set(buffer);
929
930 return buffer;
931
932 no_buffer_attrib:
933 draw->ext->image->destroyImage(pixmap_buffer);
934 no_linear_buffer:
935 if (draw->is_different_gpu)
936 draw->ext->image->destroyImage(buffer->image);
937 no_image:
938 free(buffer);
939 no_buffer:
940 xshmfence_unmap_shm(shm_fence);
941 no_shm_fence:
942 close(fence_fd);
943 return NULL;
944 }
945
946 /** dri3_update_drawable
947 *
948 * Called the first time we use the drawable and then
949 * after we receive present configure notify events to
950 * track the geometry of the drawable
951 */
952 static int
953 dri3_update_drawable(__DRIdrawable *driDrawable,
954 struct loader_dri3_drawable *draw)
955 {
956 if (draw->first_init) {
957 xcb_get_geometry_cookie_t geom_cookie;
958 xcb_get_geometry_reply_t *geom_reply;
959 xcb_void_cookie_t cookie;
960 xcb_generic_error_t *error;
961 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
962 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
963
964 draw->first_init = false;
965
966 /* Try to select for input on the window.
967 *
968 * If the drawable is a window, this will get our events
969 * delivered.
970 *
971 * Otherwise, we'll get a BadWindow error back from this request which
972 * will let us know that the drawable is a pixmap instead.
973 */
974
975 draw->eid = xcb_generate_id(draw->conn);
976 cookie =
977 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
978 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
979 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
980 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
981
982 present_capabilities_cookie =
983 xcb_present_query_capabilities(draw->conn, draw->drawable);
984
985 /* Create an XCB event queue to hold present events outside of the usual
986 * application event queue
987 */
988 draw->special_event = xcb_register_for_special_xge(draw->conn,
989 &xcb_present_id,
990 draw->eid,
991 draw->stamp);
992 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
993
994 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
995
996 if (!geom_reply)
997 return false;
998
999 draw->width = geom_reply->width;
1000 draw->height = geom_reply->height;
1001 draw->depth = geom_reply->depth;
1002 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1003
1004 free(geom_reply);
1005
1006 draw->is_pixmap = false;
1007
1008 /* Check to see if our select input call failed. If it failed with a
1009 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1010 * special event queue created above and mark the drawable as a pixmap
1011 */
1012
1013 error = xcb_request_check(draw->conn, cookie);
1014
1015 present_capabilities_reply =
1016 xcb_present_query_capabilities_reply(draw->conn,
1017 present_capabilities_cookie,
1018 NULL);
1019
1020 if (present_capabilities_reply) {
1021 draw->present_capabilities = present_capabilities_reply->capabilities;
1022 free(present_capabilities_reply);
1023 } else
1024 draw->present_capabilities = 0;
1025
1026 if (error) {
1027 if (error->error_code != BadWindow) {
1028 free(error);
1029 return false;
1030 }
1031 draw->is_pixmap = true;
1032 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1033 draw->special_event = NULL;
1034 }
1035 }
1036 dri3_flush_present_events(draw);
1037 return true;
1038 }
1039
1040 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1041 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1042 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1043 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1044 */
1045 static int
1046 image_format_to_fourcc(int format)
1047 {
1048
1049 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1050 switch (format) {
1051 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1052 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1053 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1054 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1055 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1056 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1057 }
1058 return 0;
1059 }
1060
1061 __DRIimage *
1062 loader_dri3_create_image(xcb_connection_t *c,
1063 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1064 unsigned int format,
1065 __DRIscreen *dri_screen,
1066 const __DRIimageExtension *image,
1067 void *loaderPrivate)
1068 {
1069 int *fds;
1070 __DRIimage *image_planar, *ret;
1071 int stride, offset;
1072
1073 /* Get an FD for the pixmap object
1074 */
1075 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1076
1077 stride = bp_reply->stride;
1078 offset = 0;
1079
1080 /* createImageFromFds creates a wrapper __DRIimage structure which
1081    * can deal with multiple planes for things like YUV images. So, once
1082 * we've gotten the planar wrapper, pull the single plane out of it and
1083 * discard the wrapper.
1084 */
1085 image_planar = image->createImageFromFds(dri_screen,
1086 bp_reply->width,
1087 bp_reply->height,
1088 image_format_to_fourcc(format),
1089 fds, 1,
1090 &stride, &offset, loaderPrivate);
1091 close(fds[0]);
1092 if (!image_planar)
1093 return NULL;
1094
1095 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1096
1097 image->destroyImage(image_planar);
1098
1099 return ret;
1100 }
1101
1102 /** dri3_get_pixmap_buffer
1103 *
1104 * Get the DRM object for a pixmap from the X server and
1105 * wrap that with a __DRIimage structure using createImageFromFds
1106 */
1107 static struct loader_dri3_buffer *
1108 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1109 enum loader_dri3_buffer_type buffer_type,
1110 struct loader_dri3_drawable *draw)
1111 {
1112 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1113 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1114 xcb_drawable_t pixmap;
1115 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1116 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1117 xcb_sync_fence_t sync_fence;
1118 struct xshmfence *shm_fence;
1119 int fence_fd;
1120 __DRIscreen *cur_screen;
1121
1122 if (buffer)
1123 return buffer;
1124
1125 pixmap = draw->drawable;
1126
1127 buffer = calloc(1, sizeof *buffer);
1128 if (!buffer)
1129 goto no_buffer;
1130
1131 fence_fd = xshmfence_alloc_shm();
1132 if (fence_fd < 0)
1133 goto no_fence;
1134 shm_fence = xshmfence_map_shm(fence_fd);
1135 if (shm_fence == NULL) {
1136 close (fence_fd);
1137 goto no_fence;
1138 }
1139
1140 xcb_dri3_fence_from_fd(draw->conn,
1141 pixmap,
1142 (sync_fence = xcb_generate_id(draw->conn)),
1143 false,
1144 fence_fd);
1145
1146 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1147 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1148 if (!bp_reply)
1149 goto no_image;
1150
1151 /* Get the currently-bound screen or revert to using the drawable's screen if
1152 * no contexts are currently bound. The latter case is at least necessary for
1153 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1154 */
1155 cur_screen = draw->vtable->get_dri_screen(draw);
1156 if (!cur_screen) {
1157 cur_screen = draw->dri_screen;
1158 }
1159
1160 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1161 cur_screen, draw->ext->image,
1162 buffer);
1163 if (!buffer->image)
1164 goto no_image;
1165
1166 buffer->pixmap = pixmap;
1167 buffer->own_pixmap = false;
1168 buffer->width = bp_reply->width;
1169 buffer->height = bp_reply->height;
1170 buffer->buffer_type = buffer_type;
1171 buffer->shm_fence = shm_fence;
1172 buffer->sync_fence = sync_fence;
1173
1174 draw->buffers[buf_id] = buffer;
1175
1176 free(bp_reply);
1177
1178 return buffer;
1179
1180 no_image:
1181 free(bp_reply);
1182 xcb_sync_destroy_fence(draw->conn, sync_fence);
1183 xshmfence_unmap_shm(shm_fence);
1184 no_fence:
1185 free(buffer);
1186 no_buffer:
1187 return NULL;
1188 }
1189
1190 /** dri3_get_buffer
1191 *
1192 * Find a front or back buffer, allocating new ones as necessary
1193 */
1194 static struct loader_dri3_buffer *
1195 dri3_get_buffer(__DRIdrawable *driDrawable,
1196 unsigned int format,
1197 enum loader_dri3_buffer_type buffer_type,
1198 struct loader_dri3_drawable *draw)
1199 {
1200 struct loader_dri3_buffer *buffer;
1201 int buf_id;
1202 __DRIcontext *dri_context;
1203
1204 dri_context = draw->vtable->get_dri_context(draw);
1205
1206 if (buffer_type == loader_dri3_buffer_back) {
1207 buf_id = dri3_find_back(draw);
1208
1209 if (buf_id < 0)
1210 return NULL;
1211 } else {
1212 buf_id = LOADER_DRI3_FRONT_ID;
1213 }
1214
1215 buffer = draw->buffers[buf_id];
1216
1217 /* Allocate a new buffer if there isn't an old one, or if that
1218 * old one is the wrong size
1219 */
1220 if (!buffer || buffer->width != draw->width ||
1221 buffer->height != draw->height) {
1222 struct loader_dri3_buffer *new_buffer;
1223
1224 /* Allocate the new buffers
1225 */
1226 new_buffer = dri3_alloc_render_buffer(draw,
1227 format,
1228 draw->width,
1229 draw->height,
1230 draw->depth);
1231 if (!new_buffer)
1232 return NULL;
1233
1234 /* When resizing, copy the contents of the old buffer, waiting for that
1235 * copy to complete using our fences before proceeding
1236 */
1237 switch (buffer_type) {
1238 case loader_dri3_buffer_back:
1239 if (buffer) {
1240 if (!buffer->linear_buffer) {
1241 dri3_fence_reset(draw->conn, new_buffer);
1242 dri3_fence_await(draw->conn, buffer);
1243 dri3_copy_area(draw->conn,
1244 buffer->pixmap,
1245 new_buffer->pixmap,
1246 dri3_drawable_gc(draw),
1247 0, 0, 0, 0,
1248 draw->width, draw->height);
1249 dri3_fence_trigger(draw->conn, new_buffer);
1250 } else if (draw->vtable->in_current_context(draw)) {
1251 draw->ext->image->blitImage(dri_context,
1252 new_buffer->image,
1253 buffer->image,
1254 0, 0, draw->width, draw->height,
1255 0, 0, draw->width, draw->height, 0);
1256 }
1257 dri3_free_render_buffer(draw, buffer);
1258 }
1259 break;
1260 case loader_dri3_buffer_front:
1261 dri3_fence_reset(draw->conn, new_buffer);
1262 dri3_copy_area(draw->conn,
1263 draw->drawable,
1264 new_buffer->pixmap,
1265 dri3_drawable_gc(draw),
1266 0, 0, 0, 0,
1267 draw->width, draw->height);
1268 dri3_fence_trigger(draw->conn, new_buffer);
1269
1270 if (new_buffer->linear_buffer &&
1271 draw->vtable->in_current_context(draw)) {
1272 dri3_fence_await(draw->conn, new_buffer);
1273 draw->ext->image->blitImage(dri_context,
1274 new_buffer->image,
1275 new_buffer->linear_buffer,
1276 0, 0, draw->width, draw->height,
1277 0, 0, draw->width, draw->height, 0);
1278 }
1279 break;
1280 }
1281 buffer = new_buffer;
1282 buffer->buffer_type = buffer_type;
1283 draw->buffers[buf_id] = buffer;
1284 }
1285 dri3_fence_await(draw->conn, buffer);
1286
1287 /* Return the requested buffer */
1288 return buffer;
1289 }
1290
1291 /** dri3_free_buffers
1292 *
1293 * Free the front buffer or all of the back buffers. Used
1294 * when the application changes which buffers it needs
1295 */
1296 static void
1297 dri3_free_buffers(__DRIdrawable *driDrawable,
1298 enum loader_dri3_buffer_type buffer_type,
1299 struct loader_dri3_drawable *draw)
1300 {
1301 struct loader_dri3_buffer *buffer;
1302 int first_id;
1303 int n_id;
1304 int buf_id;
1305
1306 switch (buffer_type) {
1307 case loader_dri3_buffer_back:
1308 first_id = LOADER_DRI3_BACK_ID(0);
1309 n_id = LOADER_DRI3_MAX_BACK;
1310 break;
1311 case loader_dri3_buffer_front:
1312 first_id = LOADER_DRI3_FRONT_ID;
1313 n_id = 1;
1314 }
1315
1316 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1317 buffer = draw->buffers[buf_id];
1318 if (buffer) {
1319 dri3_free_render_buffer(draw, buffer);
1320 draw->buffers[buf_id] = NULL;
1321 }
1322 }
1323 }
1324
1325 /** loader_dri3_get_buffers
1326 *
1327 * The published buffer allocation API.
1328 * Returns all of the necessary buffers, allocating
1329 * as needed.
1330 */
1331 int
1332 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1333 unsigned int format,
1334 uint32_t *stamp,
1335 void *loaderPrivate,
1336 uint32_t buffer_mask,
1337 struct __DRIimageList *buffers)
1338 {
1339 struct loader_dri3_drawable *draw = loaderPrivate;
1340 struct loader_dri3_buffer *front, *back;
1341
1342 buffers->image_mask = 0;
1343 buffers->front = NULL;
1344 buffers->back = NULL;
1345
1346 front = NULL;
1347 back = NULL;
1348
1349 if (!dri3_update_drawable(driDrawable, draw))
1350 return false;
1351
1352 /* pixmaps always have front buffers */
1353 if (draw->is_pixmap)
1354 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1355
1356 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1357       /* All pixmaps are owned by the server GPU.
1358        * When we use a different GPU, we can't use the pixmap
1359        * as a buffer since it is potentially tiled in a way
1360 * our device can't understand. In this case, use
1361 * a fake front buffer. Hopefully the pixmap
1362 * content will get synced with the fake front
1363 * buffer.
1364 */
1365 if (draw->is_pixmap && !draw->is_different_gpu)
1366 front = dri3_get_pixmap_buffer(driDrawable,
1367 format,
1368 loader_dri3_buffer_front,
1369 draw);
1370 else
1371 front = dri3_get_buffer(driDrawable,
1372 format,
1373 loader_dri3_buffer_front,
1374 draw);
1375
1376 if (!front)
1377 return false;
1378 } else {
1379 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1380 draw->have_fake_front = 0;
1381 }
1382
1383 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1384 back = dri3_get_buffer(driDrawable,
1385 format,
1386 loader_dri3_buffer_back,
1387 draw);
1388 if (!back)
1389 return false;
1390 draw->have_back = 1;
1391 } else {
1392 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1393 draw->have_back = 0;
1394 }
1395
1396 if (front) {
1397 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1398 buffers->front = front->image;
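/* A fake front is needed whenever the real front can't be rendered to
 * directly: always on PRIME setups, and for windows, whose real front
 * buffer belongs to the X server.
 */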
1399 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1400 }
1401
1402 if (back) {
1403 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1404 buffers->back = back->image;
1405 }
1406
1407 draw->stamp = stamp;
1408
1409 return true;
1410 }