loader/dri3: Destroy Present event context when destroying drawable v2
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include "loader_dri3_helper.h"
36
37 /* From xmlpool/options.h, user-exposed so should be stable */
38 #define DRI_CONF_VBLANK_NEVER 0
39 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
41 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
42
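/* Fence helpers: each buffer is guarded by an xshmfence that is mapped into
 * both the client and the X server, plus an XCB sync fence object created
 * from the same fd so the fence can also be triggered by the server.
 */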
43 static inline void
44 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
45 {
46 xshmfence_reset(buffer->shm_fence);
47 }
48
49 static inline void
50 dri3_fence_set(struct loader_dri3_buffer *buffer)
51 {
52 xshmfence_trigger(buffer->shm_fence);
53 }
54
55 static inline void
56 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
57 {
58 xcb_sync_trigger_fence(c, buffer->sync_fence);
59 }
60
61 static inline void
62 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
63 {
64 xcb_flush(c);
65 xshmfence_await(buffer->shm_fence);
66 }
67
68 static void
69 dri3_update_num_back(struct loader_dri3_drawable *draw)
70 {
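/* Heuristic for the back-buffer count: one buffer by default, one extra
 * while flipping (plus another when the server lacks async flips), and one
 * more with swap interval 0, presumably so rendering can run ahead of
 * presentation.
 */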
71 draw->num_back = 1;
72 if (draw->flipping) {
73 if (!draw->is_pixmap &&
74 !(draw->present_capabilities & XCB_PRESENT_CAPABILITY_ASYNC))
75 draw->num_back++;
76 draw->num_back++;
77 }
78 if (draw->vtable->get_swap_interval(draw) == 0)
79 draw->num_back++;
80 }
81
82 void
83 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
84 {
85 interval = draw->vtable->clamp_swap_interval(draw, interval);
86 draw->vtable->set_swap_interval(draw, interval);
87 dri3_update_num_back(draw);
88 }
89
90 /** dri3_free_render_buffer
91 *
92 * Free everything associated with one render buffer, including the pixmap,
93 * the fence objects and the driver images
94 */
95 static void
96 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
97 struct loader_dri3_buffer *buffer)
98 {
99 if (buffer->own_pixmap)
100 xcb_free_pixmap(draw->conn, buffer->pixmap);
101 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
102 xshmfence_unmap_shm(buffer->shm_fence);
103 (draw->ext->image->destroyImage)(buffer->image);
104 if (buffer->linear_buffer)
105 (draw->ext->image->destroyImage)(buffer->linear_buffer);
106 free(buffer);
107 }
108
109 void
110 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
111 {
112 int i;
113
114 (draw->ext->core->destroyDrawable)(draw->dri_drawable);
115
116 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
117 if (draw->buffers[i])
118 dri3_free_render_buffer(draw, draw->buffers[i]);
119 }
120
121 if (draw->special_event) {
122 xcb_void_cookie_t cookie =
123 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
124 XCB_PRESENT_EVENT_MASK_NO_EVENT);
125
126 xcb_discard_reply(draw->conn, cookie.sequence);
127 xcb_unregister_for_special_event(draw->conn, draw->special_event);
128 }
129 }
130
131 int
132 loader_dri3_drawable_init(xcb_connection_t *conn,
133 xcb_drawable_t drawable,
134 __DRIscreen *dri_screen,
135 bool is_different_gpu,
136 const __DRIconfig *dri_config,
137 struct loader_dri3_extensions *ext,
138 struct loader_dri3_vtable *vtable,
139 struct loader_dri3_drawable *draw)
140 {
141 xcb_get_geometry_cookie_t cookie;
142 xcb_get_geometry_reply_t *reply;
143 xcb_generic_error_t *error;
144 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
145 int swap_interval;
146
147 draw->conn = conn;
148 draw->ext = ext;
149 draw->vtable = vtable;
150 draw->drawable = drawable;
151 draw->dri_screen = dri_screen;
152 draw->is_different_gpu = is_different_gpu;
153
154 draw->have_back = 0;
155 draw->have_fake_front = 0;
156 draw->first_init = true;
157
158 if (draw->ext->config)
159 draw->ext->config->configQueryi(draw->dri_screen,
160 "vblank_mode", &vblank_mode);
161
162 switch (vblank_mode) {
163 case DRI_CONF_VBLANK_NEVER:
164 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
165 swap_interval = 0;
166 break;
167 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
168 case DRI_CONF_VBLANK_ALWAYS_SYNC:
169 default:
170 swap_interval = 1;
171 break;
172 }
173 draw->vtable->set_swap_interval(draw, swap_interval);
174
175 dri3_update_num_back(draw);
176
177 /* Create a new drawable */
178 draw->dri_drawable =
179 (draw->ext->image_driver->createNewDrawable)(dri_screen,
180 dri_config,
181 draw);
182
183 if (!draw->dri_drawable)
184 return 1;
185
186 cookie = xcb_get_geometry(draw->conn, draw->drawable);
187 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
188 if (reply == NULL || error != NULL) {
189 draw->ext->core->destroyDrawable(draw->dri_drawable);
190 return 1;
191 }
192
193 draw->width = reply->width;
194 draw->height = reply->height;
195 draw->depth = reply->depth;
196 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
197 free(reply);
198
199 /*
200 * Make sure server has the same swap interval we do for the new
201 * drawable.
202 */
203 loader_dri3_set_swap_interval(draw, swap_interval);
204
205 return 0;
206 }
207
208 /*
209 * Process one Present event
210 */
211 static void
212 dri3_handle_present_event(struct loader_dri3_drawable *draw,
213 xcb_present_generic_event_t *ge)
214 {
215 switch (ge->evtype) {
216 case XCB_PRESENT_CONFIGURE_NOTIFY: {
217 xcb_present_configure_notify_event_t *ce = (void *) ge;
218
219 draw->width = ce->width;
220 draw->height = ce->height;
221 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
222 break;
223 }
224 case XCB_PRESENT_COMPLETE_NOTIFY: {
225 xcb_present_complete_notify_event_t *ce = (void *) ge;
226
227 /* Compute the processed SBC number from the received 32-bit serial number
228 * merged with the upper 32-bits of the sent 64-bit serial number while
229 * checking for wrap.
230 */
231 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
232 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
233 if (draw->recv_sbc > draw->send_sbc)
234 draw->recv_sbc -= 0x100000000;
235 switch (ce->mode) {
236 case XCB_PRESENT_COMPLETE_MODE_FLIP:
237 draw->flipping = true;
238 break;
239 case XCB_PRESENT_COMPLETE_MODE_COPY:
240 draw->flipping = false;
241 break;
242 }
243 dri3_update_num_back(draw);
244
245 if (draw->vtable->show_fps)
246 draw->vtable->show_fps(draw, ce->ust);
247
248 draw->ust = ce->ust;
249 draw->msc = ce->msc;
250 } else {
251 draw->recv_msc_serial = ce->serial;
252 draw->notify_ust = ce->ust;
253 draw->notify_msc = ce->msc;
254 }
255 break;
256 }
257 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
258 xcb_present_idle_notify_event_t *ie = (void *) ge;
259 int b;
260
261 for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
262 struct loader_dri3_buffer *buf = draw->buffers[b];
263
264 if (buf && buf->pixmap == ie->pixmap) {
265 buf->busy = 0;
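/* An idle buffer beyond the current back-buffer count is no longer needed
 * (num_back may have shrunk), so free it instead of keeping it cached.
 */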
266 if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
267 dri3_free_render_buffer(draw, buf);
268 draw->buffers[b] = NULL;
269 }
270 break;
271 }
272 }
273 break;
274 }
275 }
276 free(ge);
277 }
278
279 static bool
280 dri3_wait_for_event(struct loader_dri3_drawable *draw)
281 {
282 xcb_generic_event_t *ev;
283 xcb_present_generic_event_t *ge;
284
285 xcb_flush(draw->conn);
286 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
287 if (!ev)
288 return false;
289 ge = (void *) ev;
290 dri3_handle_present_event(draw, ge);
291 return true;
292 }
293
294 /** loader_dri3_wait_for_msc
295 *
296 * Get the X server to send an event when the target msc/divisor/remainder is
297 * reached.
298 */
299 bool
300 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
301 int64_t target_msc,
302 int64_t divisor, int64_t remainder,
303 int64_t *ust, int64_t *msc, int64_t *sbc)
304 {
305 uint32_t msc_serial;
306
307 msc_serial = ++draw->send_msc_serial;
308 xcb_present_notify_msc(draw->conn,
309 draw->drawable,
310 msc_serial,
311 target_msc,
312 divisor,
313 remainder);
314
315 xcb_flush(draw->conn);
316
317 /* Wait for the event */
318 if (draw->special_event) {
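/* The MSC serial is only 32 bits; the signed difference below remains
 * correct across wraparound.
 */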
319 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
320 if (!dri3_wait_for_event(draw))
321 return false;
322 }
323 }
324
325 *ust = draw->notify_ust;
326 *msc = draw->notify_msc;
327 *sbc = draw->recv_sbc;
328
329 return true;
330 }
331
332 /** loader_dri3_wait_for_sbc
333 *
334 * Wait for the completed swap buffer count to reach the specified
335 * target. Presumably the application knows that this will be reached with
336 * outstanding complete events, or we're going to be here a while.
337 */
338 int
339 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
340 int64_t target_sbc, int64_t *ust,
341 int64_t *msc, int64_t *sbc)
342 {
343 /* From the GLX_OML_sync_control spec:
344 *
345 * "If <target_sbc> = 0, the function will block until all previous
346 * swaps requested with glXSwapBuffersMscOML for that window have
347 * completed."
348 */
349 if (!target_sbc)
350 target_sbc = draw->send_sbc;
351
352 while (draw->recv_sbc < target_sbc) {
353 if (!dri3_wait_for_event(draw))
354 return 0;
355 }
356
357 *ust = draw->ust;
358 *msc = draw->msc;
359 *sbc = draw->recv_sbc;
360 return 1;
361 }
362
363 /** dri3_find_back
364 *
365 * Find an idle back buffer. If there isn't one, then
366 * wait for a present idle notify event from the X server
367 */
368 static int
369 dri3_find_back(struct loader_dri3_drawable *draw)
370 {
371 int b;
372 xcb_generic_event_t *ev;
373 xcb_present_generic_event_t *ge;
374
375 for (;;) {
376 for (b = 0; b < draw->num_back; b++) {
377 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
378 struct loader_dri3_buffer *buffer = draw->buffers[id];
379
380 if (!buffer || !buffer->busy) {
381 draw->cur_back = id;
382 return id;
383 }
384 }
385 xcb_flush(draw->conn);
386 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
387 if (!ev)
388 return -1;
389 ge = (void *) ev;
390 dri3_handle_present_event(draw, ge);
391 }
392 }
393
394 static xcb_gcontext_t
395 dri3_drawable_gc(struct loader_dri3_drawable *draw)
396 {
397 if (!draw->gc) {
398 uint32_t v = 0;
399 xcb_create_gc(draw->conn,
400 (draw->gc = xcb_generate_id(draw->conn)),
401 draw->drawable,
402 XCB_GC_GRAPHICS_EXPOSURES,
403 &v);
404 }
405 return draw->gc;
406 }
407
408
409 static struct loader_dri3_buffer *
410 dri3_back_buffer(struct loader_dri3_drawable *draw)
411 {
412 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
413 }
414
415 static struct loader_dri3_buffer *
416 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
417 {
418 return draw->buffers[LOADER_DRI3_FRONT_ID];
419 }
420
421 static void
422 dri3_copy_area(xcb_connection_t *c,
423 xcb_drawable_t src_drawable,
424 xcb_drawable_t dst_drawable,
425 xcb_gcontext_t gc,
426 int16_t src_x,
427 int16_t src_y,
428 int16_t dst_x,
429 int16_t dst_y,
430 uint16_t width,
431 uint16_t height)
432 {
433 xcb_void_cookie_t cookie;
434
435 cookie = xcb_copy_area_checked(c,
436 src_drawable,
437 dst_drawable,
438 gc,
439 src_x,
440 src_y,
441 dst_x,
442 dst_y,
443 width,
444 height);
445 xcb_discard_reply(c, cookie.sequence);
446 }
447
448 /**
449 * Asks the driver to flush any queued work necessary for serializing with the
450 * X command stream, and optionally the slightly more strict requirement of
451 * glFlush() equivalence (which would require flushing even if nothing had
452 * been drawn to a window system framebuffer, for example).
453 */
454 void
455 loader_dri3_flush(struct loader_dri3_drawable *draw,
456 unsigned flags,
457 enum __DRI2throttleReason throttle_reason)
458 {
459 /* The DRI context may be NULL if no context is currently bound; skip the flush in that case */
460 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
461
462 if (dri_context) {
463 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
464 flags, throttle_reason);
465 }
466 }
467
468 void
469 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
470 int x, int y,
471 int width, int height,
472 bool flush)
473 {
474 struct loader_dri3_buffer *back;
475 unsigned flags = __DRI2_FLUSH_DRAWABLE;
476 __DRIcontext *dri_context;
477
478 dri_context = draw->vtable->get_dri_context(draw);
479
480 /* Check we have the right attachments */
481 if (!draw->have_back || draw->is_pixmap)
482 return;
483
484 if (flush)
485 flags |= __DRI2_FLUSH_CONTEXT;
486 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
487
488 back = dri3_back_buffer(draw);
489 y = draw->height - y - height;
490
491 if (draw->is_different_gpu && draw->vtable->in_current_context(draw)) {
492 /* Update the linear buffer part of the back buffer
493 * for the dri3_copy_area operation
494 */
495 draw->ext->image->blitImage(dri_context,
496 back->linear_buffer,
497 back->image,
498 0, 0, back->width,
499 back->height,
500 0, 0, back->width,
501 back->height, __BLIT_FLAG_FLUSH);
502 /* We also use blitImage to update the fake front.
503 */
504 if (draw->have_fake_front)
505 draw->ext->image->blitImage(dri_context,
506 dri3_fake_front_buffer(draw)->image,
507 back->image,
508 x, y, width, height,
509 x, y, width, height, __BLIT_FLAG_FLUSH);
510 }
511
512 dri3_fence_reset(draw->conn, back);
513 dri3_copy_area(draw->conn,
514 dri3_back_buffer(draw)->pixmap,
515 draw->drawable,
516 dri3_drawable_gc(draw),
517 x, y, x, y, width, height);
518 dri3_fence_trigger(draw->conn, back);
519 /* Refresh the fake front (if present) after we just damaged the real
520 * front.
521 */
522 if (draw->have_fake_front && !draw->is_different_gpu) {
523 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
524 dri3_copy_area(draw->conn,
525 dri3_back_buffer(draw)->pixmap,
526 dri3_fake_front_buffer(draw)->pixmap,
527 dri3_drawable_gc(draw),
528 x, y, x, y, width, height);
529 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
530 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
531 }
532 dri3_fence_await(draw->conn, back);
533 }
534
535 void
536 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
537 xcb_drawable_t dest,
538 xcb_drawable_t src)
539 {
540 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
541
542 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
543 dri3_copy_area(draw->conn,
544 src, dest,
545 dri3_drawable_gc(draw),
546 0, 0, 0, 0, draw->width, draw->height);
547 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
548 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
549 }
550
551 void
552 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
553 {
554 struct loader_dri3_buffer *front;
555 __DRIcontext *dri_context;
556
557 if (draw == NULL || !draw->have_fake_front)
558 return;
559
560 front = dri3_fake_front_buffer(draw);
561 dri_context = draw->vtable->get_dri_context(draw);
562
563 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
564
565 /* In the draw->is_different_gpu case, the linear buffer has been updated,
566 * but not yet the tiled buffer.
567 * Copy back to the tiled buffer we use for rendering.
568 * Note that we don't need flushing.
569 */
570 if (draw->is_different_gpu && draw->vtable->in_current_context(draw))
571 draw->ext->image->blitImage(dri_context,
572 front->image,
573 front->linear_buffer,
574 0, 0, front->width,
575 front->height,
576 0, 0, front->width,
577 front->height, 0);
578 }
579
580 void
581 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
582 {
583 struct loader_dri3_buffer *front;
584 __DRIcontext *dri_context;
585
586 if (draw == NULL || !draw->have_fake_front)
587 return;
588
589 front = dri3_fake_front_buffer(draw);
590 dri_context = draw->vtable->get_dri_context(draw);
591
592 /* In the draw->is_different_gpu case, we update the linear_buffer
593 * before updating the real front.
594 */
595 if (draw->is_different_gpu && draw->vtable->in_current_context(draw))
596 draw->ext->image->blitImage(dri_context,
597 front->linear_buffer,
598 front->image,
599 0, 0, front->width,
600 front->height,
601 0, 0, front->width,
602 front->height, __BLIT_FLAG_FLUSH);
603 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
604 }
605
606 /** dri3_flush_present_events
607 *
608 * Process any present events that have been received from the X server
609 */
610 static void
611 dri3_flush_present_events(struct loader_dri3_drawable *draw)
612 {
613 /* Check to see if any configuration changes have occurred
614 * since we were last invoked
615 */
616 if (draw->special_event) {
617 xcb_generic_event_t *ev;
618
619 while ((ev = xcb_poll_for_special_event(draw->conn,
620 draw->special_event)) != NULL) {
621 xcb_present_generic_event_t *ge = (void *) ev;
622 dri3_handle_present_event(draw, ge);
623 }
624 }
625 }
626
627 /** loader_dri3_swap_buffers_msc
628 *
629 * Make the current back buffer visible using the present extension
630 */
631 int64_t
632 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
633 int64_t target_msc, int64_t divisor,
634 int64_t remainder, unsigned flush_flags,
635 bool force_copy)
636 {
637 struct loader_dri3_buffer *back;
638 __DRIcontext *dri_context;
639 int64_t ret = 0;
640 uint32_t options = XCB_PRESENT_OPTION_NONE;
641 int swap_interval;
642
643 dri_context = draw->vtable->get_dri_context(draw);
644 swap_interval = draw->vtable->get_swap_interval(draw);
645
646 draw->vtable->flush_drawable(draw, flush_flags);
647
648 back = draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
649 if (draw->is_different_gpu && back) {
650 /* Update the linear buffer before presenting the pixmap */
651 draw->ext->image->blitImage(dri_context,
652 back->linear_buffer,
653 back->image,
654 0, 0, back->width,
655 back->height,
656 0, 0, back->width,
657 back->height, __BLIT_FLAG_FLUSH);
658 /* Update the fake front */
659 if (draw->have_fake_front)
660 draw->ext->image->blitImage(dri_context,
661 draw->buffers[LOADER_DRI3_FRONT_ID]->image,
662 back->image,
663 0, 0, draw->width, draw->height,
664 0, 0, draw->width, draw->height,
665 __BLIT_FLAG_FLUSH);
666 }
667
668 dri3_flush_present_events(draw);
669
670 if (back && !draw->is_pixmap) {
671 dri3_fence_reset(draw->conn, back);
672
673 /* Compute when we want the frame shown by taking the last known
674 * successful MSC and adding in a swap interval for each outstanding swap
675 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
676 * semantics"
677 */
678 ++draw->send_sbc;
679 if (target_msc == 0 && divisor == 0 && remainder == 0)
680 target_msc = draw->msc + swap_interval *
681 (draw->send_sbc - draw->recv_sbc);
682 else if (divisor == 0 && remainder > 0) {
683 /* From the GLX_OML_sync_control spec:
684 * "If <divisor> = 0, the swap will occur when MSC becomes
685 * greater than or equal to <target_msc>."
686 *
687 * Note that there's no mention of the remainder. The Present
688 * extension throws BadValue for remainder != 0 with divisor == 0, so
689 * just drop the passed in value.
690 */
691 remainder = 0;
692 }
693
694 /* From the GLX_EXT_swap_control spec
695 * and the EGL 1.4 spec (page 53):
696 *
697 * "If <interval> is set to a value of 0, buffer swaps are not
698 * synchronized to a video frame."
699 *
700 * Implementation note: It is possible to enable triple buffering
701 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
702 * the default.
703 */
704 if (swap_interval == 0)
705 options |= XCB_PRESENT_OPTION_ASYNC;
706 if (force_copy)
707 options |= XCB_PRESENT_OPTION_COPY;
708
709 back->busy = 1;
710 back->last_swap = draw->send_sbc;
711 xcb_present_pixmap(draw->conn,
712 draw->drawable,
713 back->pixmap,
714 (uint32_t) draw->send_sbc,
715 0, /* valid */
716 0, /* update */
717 0, /* x_off */
718 0, /* y_off */
719 None, /* target_crtc */
720 None,
721 back->sync_fence,
722 options,
723 target_msc,
724 divisor,
725 remainder, 0, NULL);
726 ret = (int64_t) draw->send_sbc;
727
728 /* If there's a fake front, then copy the source back buffer
729 * to the fake front to keep it up to date. This needs
730 * to reset the fence and make future users block until
731 * the X server is done copying the bits
732 */
733 if (draw->have_fake_front && !draw->is_different_gpu) {
734 dri3_fence_reset(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
735 dri3_copy_area(draw->conn,
736 back->pixmap,
737 draw->buffers[LOADER_DRI3_FRONT_ID]->pixmap,
738 dri3_drawable_gc(draw),
739 0, 0, 0, 0,
740 draw->width, draw->height);
741 dri3_fence_trigger(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
742 }
743 xcb_flush(draw->conn);
744 if (draw->stamp)
745 ++(*draw->stamp);
746 }
747
748 (draw->ext->flush->invalidate)(draw->dri_drawable);
749
750 return ret;
751 }
752
753 int
754 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
755 {
756 int back_id = LOADER_DRI3_BACK_ID(dri3_find_back(draw));
757
758 if (back_id < 0 || !draw->buffers[back_id])
759 return 0;
760
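/* Buffer age in the EXT_buffer_age sense: a buffer shown by the most recent
 * swap has age 1, and each further swap ages it by one.
 */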
761 if (draw->buffers[back_id]->last_swap != 0)
762 return draw->send_sbc - draw->buffers[back_id]->last_swap + 1;
763 else
764 return 0;
765 }
766
767 /** loader_dri3_open
768 *
769 * Wrapper around xcb_dri3_open
770 */
771 int
772 loader_dri3_open(xcb_connection_t *conn,
773 xcb_window_t root,
774 uint32_t provider)
775 {
776 xcb_dri3_open_cookie_t cookie;
777 xcb_dri3_open_reply_t *reply;
778 int fd;
779
780 cookie = xcb_dri3_open(conn,
781 root,
782 provider);
783
784 reply = xcb_dri3_open_reply(conn, cookie, NULL);
785 if (!reply)
786 return -1;
787
788 if (reply->nfd != 1) {
789 free(reply);
790 return -1;
791 }
792
793 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
794 free(reply);
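/* Set close-on-exec so the DRM fd does not leak into child processes */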
795 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
796
797 return fd;
798 }
799
800 static uint32_t
801 dri3_cpp_for_format(uint32_t format) {
802 switch (format) {
803 case __DRI_IMAGE_FORMAT_R8:
804 return 1;
805 case __DRI_IMAGE_FORMAT_RGB565:
806 case __DRI_IMAGE_FORMAT_GR88:
807 return 2;
808 case __DRI_IMAGE_FORMAT_XRGB8888:
809 case __DRI_IMAGE_FORMAT_ARGB8888:
810 case __DRI_IMAGE_FORMAT_ABGR8888:
811 case __DRI_IMAGE_FORMAT_XBGR8888:
812 case __DRI_IMAGE_FORMAT_XRGB2101010:
813 case __DRI_IMAGE_FORMAT_ARGB2101010:
814 case __DRI_IMAGE_FORMAT_SARGB8:
815 return 4;
816 case __DRI_IMAGE_FORMAT_NONE:
817 default:
818 return 0;
819 }
820 }
821
822 /** dri3_alloc_render_buffer
823 *
824 * Use the driver createImage function to construct a __DRIimage, then
825 * get a file descriptor for it and create an X pixmap from that fd.
826 *
827 * Allocate an xshmfence for synchronization
828 */
829 static struct loader_dri3_buffer *
830 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
831 int width, int height, int depth)
832 {
833 struct loader_dri3_buffer *buffer;
834 __DRIimage *pixmap_buffer;
835 xcb_pixmap_t pixmap;
836 xcb_sync_fence_t sync_fence;
837 struct xshmfence *shm_fence;
838 int buffer_fd, fence_fd;
839 int stride;
840
841 /* Create an xshmfence object and
842 * prepare to send that to the X server
843 */
844
845 fence_fd = xshmfence_alloc_shm();
846 if (fence_fd < 0)
847 return NULL;
848
849 shm_fence = xshmfence_map_shm(fence_fd);
850 if (shm_fence == NULL)
851 goto no_shm_fence;
852
853 /* Allocate the image from the driver
854 */
855 buffer = calloc(1, sizeof *buffer);
856 if (!buffer)
857 goto no_buffer;
858
859 buffer->cpp = dri3_cpp_for_format(format);
860 if (!buffer->cpp)
861 goto no_image;
862
863 if (!draw->is_different_gpu) {
864 buffer->image = (draw->ext->image->createImage)(draw->dri_screen,
865 width, height,
866 format,
867 __DRI_IMAGE_USE_SHARE |
868 __DRI_IMAGE_USE_SCANOUT |
869 __DRI_IMAGE_USE_BACKBUFFER,
870 buffer);
871 pixmap_buffer = buffer->image;
872
873 if (!buffer->image)
874 goto no_image;
875 } else {
876 buffer->image = (draw->ext->image->createImage)(draw->dri_screen,
877 width, height,
878 format,
879 0,
880 buffer);
881
882 if (!buffer->image)
883 goto no_image;
884
885 buffer->linear_buffer =
886 (draw->ext->image->createImage)(draw->dri_screen,
887 width, height, format,
888 __DRI_IMAGE_USE_SHARE |
889 __DRI_IMAGE_USE_LINEAR |
890 __DRI_IMAGE_USE_BACKBUFFER,
891 buffer);
892 pixmap_buffer = buffer->linear_buffer;
893
894 if (!buffer->linear_buffer)
895 goto no_linear_buffer;
896 }
897
898 /* X wants the stride, so ask the image for it
899 */
900 if (!(draw->ext->image->queryImage)(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
901 &stride))
902 goto no_buffer_attrib;
903
904 buffer->pitch = stride;
905
906 if (!(draw->ext->image->queryImage)(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
907 &buffer_fd))
908 goto no_buffer_attrib;
909
910 xcb_dri3_pixmap_from_buffer(draw->conn,
911 (pixmap = xcb_generate_id(draw->conn)),
912 draw->drawable,
913 buffer->size,
914 width, height, buffer->pitch,
915 depth, buffer->cpp * 8,
916 buffer_fd);
917
918 xcb_dri3_fence_from_fd(draw->conn,
919 pixmap,
920 (sync_fence = xcb_generate_id(draw->conn)),
921 false,
922 fence_fd);
923
924 buffer->pixmap = pixmap;
925 buffer->own_pixmap = true;
926 buffer->sync_fence = sync_fence;
927 buffer->shm_fence = shm_fence;
928 buffer->width = width;
929 buffer->height = height;
930
931 /* Mark the buffer as idle
932 */
933 dri3_fence_set(buffer);
934
935 return buffer;
936
937 no_buffer_attrib:
938 (draw->ext->image->destroyImage)(pixmap_buffer);
939 no_linear_buffer:
940 if (draw->is_different_gpu)
941 (draw->ext->image->destroyImage)(buffer->image);
942 no_image:
943 free(buffer);
944 no_buffer:
945 xshmfence_unmap_shm(shm_fence);
946 no_shm_fence:
947 close(fence_fd);
948 return NULL;
949 }
950
951 /** dri3_update_drawable
952 *
953 * Called the first time we use the drawable and then
954 * after we receive present configure notify events to
955 * track the geometry of the drawable
956 */
957 static int
958 dri3_update_drawable(__DRIdrawable *driDrawable,
959 struct loader_dri3_drawable *draw)
960 {
961 if (draw->first_init) {
962 xcb_get_geometry_cookie_t geom_cookie;
963 xcb_get_geometry_reply_t *geom_reply;
964 xcb_void_cookie_t cookie;
965 xcb_generic_error_t *error;
966 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
967 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
968
969 draw->first_init = false;
970
971 /* Try to select for input on the window.
972 *
973 * If the drawable is a window, this will get our events
974 * delivered.
975 *
976 * Otherwise, we'll get a BadWindow error back from this request which
977 * will let us know that the drawable is a pixmap instead.
978 */
979
980 draw->eid = xcb_generate_id(draw->conn);
981 cookie =
982 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
983 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
984 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
985 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
986
987 present_capabilities_cookie =
988 xcb_present_query_capabilities(draw->conn, draw->drawable);
989
990 /* Create an XCB event queue to hold present events outside of the usual
991 * application event queue
992 */
993 draw->special_event = xcb_register_for_special_xge(draw->conn,
994 &xcb_present_id,
995 draw->eid,
996 draw->stamp);
997 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
998
999 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1000
1001 if (!geom_reply)
1002 return false;
1003
1004 draw->width = geom_reply->width;
1005 draw->height = geom_reply->height;
1006 draw->depth = geom_reply->depth;
1007 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1008
1009 free(geom_reply);
1010
1011 draw->is_pixmap = false;
1012
1013 /* Check to see if our select input call failed. If it failed with a
1014 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1015 * special event queue created above and mark the drawable as a pixmap
1016 */
1017
1018 error = xcb_request_check(draw->conn, cookie);
1019
1020 present_capabilities_reply =
1021 xcb_present_query_capabilities_reply(draw->conn,
1022 present_capabilities_cookie,
1023 NULL);
1024
1025 if (present_capabilities_reply) {
1026 draw->present_capabilities = present_capabilities_reply->capabilities;
1027 free(present_capabilities_reply);
1028 } else
1029 draw->present_capabilities = 0;
1030
1031 if (error) {
1032 if (error->error_code != BadWindow) {
1033 free(error);
1034 return false;
1035 }
1036 draw->is_pixmap = true;
1037 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1038 draw->special_event = NULL;
1039 }
1040 }
1041 dri3_flush_present_events(draw);
1042 return true;
1043 }
1044
1045 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1046 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1047 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1048 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1049 */
1050 static int
1051 image_format_to_fourcc(int format)
1052 {
1053
1054 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1055 switch (format) {
1056 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1057 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1058 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1059 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1060 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1061 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1062 }
1063 return 0;
1064 }
1065
1066 __DRIimage *
1067 loader_dri3_create_image(xcb_connection_t *c,
1068 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1069 unsigned int format,
1070 __DRIscreen *dri_screen,
1071 const __DRIimageExtension *image,
1072 void *loaderPrivate)
1073 {
1074 int *fds;
1075 __DRIimage *image_planar, *ret;
1076 int stride, offset;
1077
1078 /* Get an FD for the pixmap object
1079 */
1080 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1081
1082 stride = bp_reply->stride;
1083 offset = 0;
1084
1085 /* createImageFromFds creates a wrapper __DRIimage structure which
1086 * can deal with multiple planes for things like Yuv images. So, once
1087 * we've gotten the planar wrapper, pull the single plane out of it and
1088 * discard the wrapper.
1089 */
1090 image_planar = (image->createImageFromFds)(dri_screen,
1091 bp_reply->width,
1092 bp_reply->height,
1093 image_format_to_fourcc(format),
1094 fds, 1,
1095 &stride, &offset, loaderPrivate);
1096 close(fds[0]);
1097 if (!image_planar)
1098 return NULL;
1099
1100 ret = (image->fromPlanar)(image_planar, 0, loaderPrivate);
1101
1102 (image->destroyImage)(image_planar);
1103
1104 return ret;
1105 }
1106
1107 /** dri3_get_pixmap_buffer
1108 *
1109 * Get the DRM object for a pixmap from the X server and
1110 * wrap that with a __DRIimage structure using createImageFromFds
1111 */
1112 static struct loader_dri3_buffer *
1113 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1114 enum loader_dri3_buffer_type buffer_type,
1115 struct loader_dri3_drawable *draw)
1116 {
1117 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1118 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1119 xcb_drawable_t pixmap;
1120 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1121 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1122 xcb_sync_fence_t sync_fence;
1123 struct xshmfence *shm_fence;
1124 int fence_fd;
1125
1126 if (buffer)
1127 return buffer;
1128
1129 pixmap = draw->drawable;
1130
1131 buffer = calloc(1, sizeof *buffer);
1132 if (!buffer)
1133 goto no_buffer;
1134
1135 fence_fd = xshmfence_alloc_shm();
1136 if (fence_fd < 0)
1137 goto no_fence;
1138 shm_fence = xshmfence_map_shm(fence_fd);
1139 if (shm_fence == NULL) {
1140 close (fence_fd);
1141 goto no_fence;
1142 }
1143
1144 xcb_dri3_fence_from_fd(draw->conn,
1145 pixmap,
1146 (sync_fence = xcb_generate_id(draw->conn)),
1147 false,
1148 fence_fd);
1149
1150 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1151 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1152 if (!bp_reply)
1153 goto no_image;
1154
1155 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1156 draw->dri_screen, draw->ext->image,
1157 buffer);
1158 if (!buffer->image)
1159 goto no_image;
1160
1161 buffer->pixmap = pixmap;
1162 buffer->own_pixmap = false;
1163 buffer->width = bp_reply->width;
1164 buffer->height = bp_reply->height;
1165 buffer->buffer_type = buffer_type;
1166 buffer->shm_fence = shm_fence;
1167 buffer->sync_fence = sync_fence;
1168
1169 draw->buffers[buf_id] = buffer;
1170
1171 free(bp_reply);
1172
1173 return buffer;
1174
1175 no_image:
1176 free(bp_reply);
1177 xcb_sync_destroy_fence(draw->conn, sync_fence);
1178 xshmfence_unmap_shm(shm_fence);
1179 no_fence:
1180 free(buffer);
1181 no_buffer:
1182 return NULL;
1183 }
1184
1185 /** dri3_get_buffer
1186 *
1187 * Find a front or back buffer, allocating new ones as necessary
1188 */
1189 static struct loader_dri3_buffer *
1190 dri3_get_buffer(__DRIdrawable *driDrawable,
1191 unsigned int format,
1192 enum loader_dri3_buffer_type buffer_type,
1193 struct loader_dri3_drawable *draw)
1194 {
1195 struct loader_dri3_buffer *buffer;
1196 int buf_id;
1197 __DRIcontext *dri_context;
1198
1199 dri_context = draw->vtable->get_dri_context(draw);
1200
1201 if (buffer_type == loader_dri3_buffer_back) {
1202 buf_id = dri3_find_back(draw);
1203
1204 if (buf_id < 0)
1205 return NULL;
1206 } else {
1207 buf_id = LOADER_DRI3_FRONT_ID;
1208 }
1209
1210 buffer = draw->buffers[buf_id];
1211
1212 /* Allocate a new buffer if there isn't an old one, or if that
1213 * old one is the wrong size
1214 */
1215 if (!buffer || buffer->width != draw->width ||
1216 buffer->height != draw->height) {
1217 struct loader_dri3_buffer *new_buffer;
1218
1219 /* Allocate the new buffers
1220 */
1221 new_buffer = dri3_alloc_render_buffer(draw,
1222 format,
1223 draw->width,
1224 draw->height,
1225 draw->depth);
1226 if (!new_buffer)
1227 return NULL;
1228
1229 /* When resizing, copy the contents of the old buffer, waiting for that
1230 * copy to complete using our fences before proceeding
1231 */
1232 switch (buffer_type) {
1233 case loader_dri3_buffer_back:
1234 if (buffer) {
1235 if (!buffer->linear_buffer) {
1236 dri3_fence_reset(draw->conn, new_buffer);
1237 dri3_fence_await(draw->conn, buffer);
1238 dri3_copy_area(draw->conn,
1239 buffer->pixmap,
1240 new_buffer->pixmap,
1241 dri3_drawable_gc(draw),
1242 0, 0, 0, 0,
1243 draw->width, draw->height);
1244 dri3_fence_trigger(draw->conn, new_buffer);
1245 } else if (draw->vtable->in_current_context(draw)) {
1246 draw->ext->image->blitImage(dri_context,
1247 new_buffer->image,
1248 buffer->image,
1249 0, 0, draw->width, draw->height,
1250 0, 0, draw->width, draw->height, 0);
1251 }
1252 dri3_free_render_buffer(draw, buffer);
1253 }
1254 break;
1255 case loader_dri3_buffer_front:
1256 dri3_fence_reset(draw->conn, new_buffer);
1257 dri3_copy_area(draw->conn,
1258 draw->drawable,
1259 new_buffer->pixmap,
1260 dri3_drawable_gc(draw),
1261 0, 0, 0, 0,
1262 draw->width, draw->height);
1263 dri3_fence_trigger(draw->conn, new_buffer);
1264
1265 if (new_buffer->linear_buffer &&
1266 draw->vtable->in_current_context(draw)) {
1267 dri3_fence_await(draw->conn, new_buffer);
1268 draw->ext->image->blitImage(dri_context,
1269 new_buffer->image,
1270 new_buffer->linear_buffer,
1271 0, 0, draw->width, draw->height,
1272 0, 0, draw->width, draw->height, 0);
1273 }
1274 break;
1275 }
1276 buffer = new_buffer;
1277 buffer->buffer_type = buffer_type;
1278 draw->buffers[buf_id] = buffer;
1279 }
1280 dri3_fence_await(draw->conn, buffer);
1281
1282 /* Return the requested buffer */
1283 return buffer;
1284 }
1285
1286 /** dri3_free_buffers
1287 *
1288 * Free the front buffer or all of the back buffers. Used
1289 * when the application changes which buffers it needs
1290 */
1291 static void
1292 dri3_free_buffers(__DRIdrawable *driDrawable,
1293 enum loader_dri3_buffer_type buffer_type,
1294 struct loader_dri3_drawable *draw)
1295 {
1296 struct loader_dri3_buffer *buffer;
1297 int first_id;
1298 int n_id;
1299 int buf_id;
1300
1301 switch (buffer_type) {
1302 case loader_dri3_buffer_back:
1303 first_id = LOADER_DRI3_BACK_ID(0);
1304 n_id = LOADER_DRI3_MAX_BACK;
1305 break;
1306 case loader_dri3_buffer_front:
1307 first_id = LOADER_DRI3_FRONT_ID;
1308 n_id = 1;
1309 }
1310
1311 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1312 buffer = draw->buffers[buf_id];
1313 if (buffer) {
1314 dri3_free_render_buffer(draw, buffer);
1315 draw->buffers[buf_id] = NULL;
1316 }
1317 }
1318 }
1319
1320 /** loader_dri3_get_buffers
1321 *
1322 * The published buffer allocation API.
1323 * Returns all of the necessary buffers, allocating
1324 * as needed.
1325 */
1326 int
1327 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1328 unsigned int format,
1329 uint32_t *stamp,
1330 void *loaderPrivate,
1331 uint32_t buffer_mask,
1332 struct __DRIimageList *buffers)
1333 {
1334 struct loader_dri3_drawable *draw = loaderPrivate;
1335 struct loader_dri3_buffer *front, *back;
1336
1337 buffers->image_mask = 0;
1338 buffers->front = NULL;
1339 buffers->back = NULL;
1340
1341 front = NULL;
1342 back = NULL;
1343
1344 if (!dri3_update_drawable(driDrawable, draw))
1345 return false;
1346
1347 /* pixmaps always have front buffers */
1348 if (draw->is_pixmap)
1349 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1350
1351 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1352 /* All pixmaps are owned by the server gpu.
1353 * When we use a different gpu, we can't use the pixmap
1354 * as a buffer since it is potentially tiled in a way
1355 * our device can't understand. In this case, use
1356 * a fake front buffer. Hopefully the pixmap
1357 * content will get synced with the fake front
1358 * buffer.
1359 */
1360 if (draw->is_pixmap && !draw->is_different_gpu)
1361 front = dri3_get_pixmap_buffer(driDrawable,
1362 format,
1363 loader_dri3_buffer_front,
1364 draw);
1365 else
1366 front = dri3_get_buffer(driDrawable,
1367 format,
1368 loader_dri3_buffer_front,
1369 draw);
1370
1371 if (!front)
1372 return false;
1373 } else {
1374 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1375 draw->have_fake_front = 0;
1376 }
1377
1378 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1379 back = dri3_get_buffer(driDrawable,
1380 format,
1381 loader_dri3_buffer_back,
1382 draw);
1383 if (!back)
1384 return false;
1385 draw->have_back = 1;
1386 } else {
1387 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1388 draw->have_back = 0;
1389 }
1390
1391 if (front) {
1392 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1393 buffers->front = front->image;
1394 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1395 }
1396
1397 if (back) {
1398 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1399 buffers->back = back->image;
1400 }
1401
1402 draw->stamp = stamp;
1403
1404 return true;
1405 }