loader_dri3: Increase the likelihood of reusing the current swap buffer
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include <c11/threads.h>
36 #include "loader_dri3_helper.h"
37
38 /* From xmlpool/options.h, user-exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
43
44 /**
45 * A cached blit context.
46 */
47 struct loader_dri3_blit_context {
48 mtx_t mtx;
49 __DRIcontext *ctx;
50 __DRIscreen *cur_screen;
51 const __DRIcoreExtension *core;
52 };
53
54 /* For simplicity we maintain the cache only for a single screen at a time */
55 static struct loader_dri3_blit_context blit_context = {
56 _MTX_INITIALIZER_NP, NULL
57 };
58
59 static void
60 dri3_flush_present_events(struct loader_dri3_drawable *draw);
61
62 /**
63 * Do we have blit functionality in the image blit extension?
64 *
65 * \param draw[in] The drawable intended to blit from / to.
66 * \return true if we have blit functionality. false otherwise.
67 */
68 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
69 {
70 return draw->ext->image->base.version >= 9 &&
71 draw->ext->image->blitImage != NULL;
72 }
73
74 /**
75 * Get and lock (for use with the current thread) a dri context associated
76 * with the drawable's dri screen. The context is intended to be used with
77 * the dri image extension's blitImage method.
78 *
79 * \param draw[in] Pointer to the drawable whose dri screen we want a
80 * dri context for.
81 * \return A dri context or NULL if context creation failed.
82 *
83 * When the caller is done with the context (even if the context returned was
84 * NULL), the caller must call loader_dri3_blit_context_put.
85 */
86 static __DRIcontext *
87 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
88 {
89 mtx_lock(&blit_context.mtx);
90
91 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
92 blit_context.core->destroyContext(blit_context.ctx);
93 blit_context.ctx = NULL;
94 }
95
96 if (!blit_context.ctx) {
97 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
98 NULL, NULL, NULL);
99 blit_context.cur_screen = draw->dri_screen;
100 blit_context.core = draw->ext->core;
101 }
102
103 return blit_context.ctx;
104 }
105
106 /**
107 * Release (for use with other threads) a dri context previously obtained using
108 * loader_dri3_blit_context_get.
109 */
110 static void
111 loader_dri3_blit_context_put(void)
112 {
113 mtx_unlock(&blit_context.mtx);
114 }
115
116 /**
117 * Blit (parts of) the contents of a DRI image to another DRI image
118 *
119 * \param draw[in] The drawable which owns the images.
120 * \param dst[in] The destination image.
121 * \param src[in] The source image.
122 * \param dstx0[in] Start destination coordinate.
123 * \param dsty0[in] Start destination coordinate.
124 * \param width[in] Blit width.
125 * \param height[in] Blit height.
126 * \param srcx0[in] Start source coordinate.
127 * \param srcy0[in] Start source coordinate.
128 * \param flush_flag[in] Image blit flush flag.
129 * \return true iff successful.
130 */
131 static bool
132 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
133 __DRIimage *dst, __DRIimage *src,
134 int dstx0, int dsty0, int width, int height,
135 int srcx0, int srcy0, int flush_flag)
136 {
137 __DRIcontext *dri_context;
138 bool use_blit_context = false;
139
140 if (!loader_dri3_have_image_blit(draw))
141 return false;
142
143 dri_context = draw->vtable->get_dri_context(draw);
144
145 if (!dri_context || !draw->vtable->in_current_context(draw)) {
146 dri_context = loader_dri3_blit_context_get(draw);
147 use_blit_context = true;
148 flush_flag |= __BLIT_FLAG_FLUSH;
149 }
150
151 if (dri_context)
152 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
153 width, height, srcx0, srcy0,
154 width, height, flush_flag);
155
156 if (use_blit_context)
157 loader_dri3_blit_context_put();
158
159 return dri_context != NULL;
160 }
161
162 static inline void
163 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
164 {
165 xshmfence_reset(buffer->shm_fence);
166 }
167
168 static inline void
169 dri3_fence_set(struct loader_dri3_buffer *buffer)
170 {
171 xshmfence_trigger(buffer->shm_fence);
172 }
173
174 static inline void
175 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
176 {
177 xcb_sync_trigger_fence(c, buffer->sync_fence);
178 }
179
180 static inline void
181 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
182 {
183 xcb_flush(c);
184 xshmfence_await(buffer->shm_fence);
185 }
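/* Illustrative note (not part of the original file): the xshmfence and the
 * XCB sync fence wrapped by these helpers refer to the same shared fence
 * object (allocated with xshmfence_alloc_shm() and imported with
 * xcb_dri3_fence_from_fd() further below), so a client-side await pairs with
 * a trigger queued on the X connection.  The copy paths later in this file
 * use the helpers roughly like this:
 *
 *    dri3_fence_reset(draw->conn, buffer);    // mark the fence untriggered
 *    dri3_copy_area(draw->conn, ...);         // queue work touching buffer
 *    dri3_fence_trigger(draw->conn, buffer);  // server signals after the copy
 *    dri3_fence_await(draw->conn, buffer);    // flush and block until signaled
 */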
186
187 static void
188 dri3_update_num_back(struct loader_dri3_drawable *draw)
189 {
190 if (draw->flipping)
191 draw->num_back = 3;
192 else
193 draw->num_back = 2;
194 }
195
196 void
197 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
198 {
199 draw->swap_interval = interval;
200 dri3_update_num_back(draw);
201 }
202
203 /** dri3_free_render_buffer
204 *
205 * Free everything associated with one render buffer, including the pixmap,
206 * the fence objects and the driver images
207 */
208 static void
209 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
210 struct loader_dri3_buffer *buffer)
211 {
212 if (buffer->own_pixmap)
213 xcb_free_pixmap(draw->conn, buffer->pixmap);
214 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
215 xshmfence_unmap_shm(buffer->shm_fence);
216 draw->ext->image->destroyImage(buffer->image);
217 if (buffer->linear_buffer)
218 draw->ext->image->destroyImage(buffer->linear_buffer);
219 free(buffer);
220 }
221
222 void
223 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
224 {
225 int i;
226
227 draw->ext->core->destroyDrawable(draw->dri_drawable);
228
229 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
230 if (draw->buffers[i])
231 dri3_free_render_buffer(draw, draw->buffers[i]);
232 }
233
234 if (draw->special_event) {
235 xcb_void_cookie_t cookie =
236 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
237 XCB_PRESENT_EVENT_MASK_NO_EVENT);
238
239 xcb_discard_reply(draw->conn, cookie.sequence);
240 xcb_unregister_for_special_event(draw->conn, draw->special_event);
241 }
242 }
243
244 int
245 loader_dri3_drawable_init(xcb_connection_t *conn,
246 xcb_drawable_t drawable,
247 __DRIscreen *dri_screen,
248 bool is_different_gpu,
249 const __DRIconfig *dri_config,
250 struct loader_dri3_extensions *ext,
251 const struct loader_dri3_vtable *vtable,
252 struct loader_dri3_drawable *draw)
253 {
254 xcb_get_geometry_cookie_t cookie;
255 xcb_get_geometry_reply_t *reply;
256 xcb_generic_error_t *error;
257 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
258 int swap_interval;
259
260 draw->conn = conn;
261 draw->ext = ext;
262 draw->vtable = vtable;
263 draw->drawable = drawable;
264 draw->dri_screen = dri_screen;
265 draw->is_different_gpu = is_different_gpu;
266
267 draw->have_back = 0;
268 draw->have_fake_front = 0;
269 draw->first_init = true;
270
271 if (draw->ext->config)
272 draw->ext->config->configQueryi(draw->dri_screen,
273 "vblank_mode", &vblank_mode);
274
275 switch (vblank_mode) {
276 case DRI_CONF_VBLANK_NEVER:
277 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
278 swap_interval = 0;
279 break;
280 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
281 case DRI_CONF_VBLANK_ALWAYS_SYNC:
282 default:
283 swap_interval = 1;
284 break;
285 }
286 draw->swap_interval = swap_interval;
287
288 dri3_update_num_back(draw);
289
290 /* Create a new drawable */
291 draw->dri_drawable =
292 draw->ext->image_driver->createNewDrawable(dri_screen,
293 dri_config,
294 draw);
295
296 if (!draw->dri_drawable)
297 return 1;
298
299 cookie = xcb_get_geometry(draw->conn, draw->drawable);
300 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
301 if (reply == NULL || error != NULL) {
302 draw->ext->core->destroyDrawable(draw->dri_drawable);
303 return 1;
304 }
305
306 draw->width = reply->width;
307 draw->height = reply->height;
308 draw->depth = reply->depth;
309 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
310 free(reply);
311
312 /*
313 * Make sure the server has the same swap interval we do for the new
314 * drawable.
315 */
316 loader_dri3_set_swap_interval(draw, swap_interval);
317
318 return 0;
319 }
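/* Illustrative usage sketch (not part of the original file): a window-system
 * binding typically embeds struct loader_dri3_drawable in its own
 * per-drawable state, initializes it once, and tears it down with
 * loader_dri3_drawable_fini().  Here "ext" and "vtable" stand for the
 * caller's extension table and callback table:
 *
 *    if (loader_dri3_drawable_init(conn, xcb_drawable, dri_screen,
 *                                  is_different_gpu, dri_config,
 *                                  ext, vtable, draw) != 0)
 *       return false;   // drawable could not be created
 *    ...
 *    loader_dri3_drawable_fini(draw);
 */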
320
321 /*
322 * Process one Present event
323 */
324 static void
325 dri3_handle_present_event(struct loader_dri3_drawable *draw,
326 xcb_present_generic_event_t *ge)
327 {
328 switch (ge->evtype) {
329 case XCB_PRESENT_CONFIGURE_NOTIFY: {
330 xcb_present_configure_notify_event_t *ce = (void *) ge;
331
332 draw->width = ce->width;
333 draw->height = ce->height;
334 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
335 break;
336 }
337 case XCB_PRESENT_COMPLETE_NOTIFY: {
338 xcb_present_complete_notify_event_t *ce = (void *) ge;
339
340 /* Compute the processed SBC number from the received 32-bit serial number
341 * merged with the upper 32 bits of the sent 64-bit serial number while
342 * checking for wrap.
343 */
344 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
345 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
346 if (draw->recv_sbc > draw->send_sbc)
347 draw->recv_sbc -= 0x100000000;
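         /* Worked example (added for illustration): if draw->send_sbc is
          * 0x100000005 and the completing present was queued back when the
          * low 32 bits were 0xfffffffe, the event's serial is 0xfffffffe and
          * the merge above first yields 0x1fffffffe.  That exceeds send_sbc,
          * so subtracting 0x100000000 recovers the correct value 0xfffffffe.
          */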
348 switch (ce->mode) {
349 case XCB_PRESENT_COMPLETE_MODE_FLIP:
350 draw->flipping = true;
351 break;
352 case XCB_PRESENT_COMPLETE_MODE_COPY:
353 draw->flipping = false;
354 break;
355 }
356 dri3_update_num_back(draw);
357
358 if (draw->vtable->show_fps)
359 draw->vtable->show_fps(draw, ce->ust);
360
361 draw->ust = ce->ust;
362 draw->msc = ce->msc;
363 } else {
364 draw->recv_msc_serial = ce->serial;
365 draw->notify_ust = ce->ust;
366 draw->notify_msc = ce->msc;
367 }
368 break;
369 }
370 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
371 xcb_present_idle_notify_event_t *ie = (void *) ge;
372 int b;
373
374 for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
375 struct loader_dri3_buffer *buf = draw->buffers[b];
376
377 if (buf && buf->pixmap == ie->pixmap) {
378 buf->busy = 0;
379 if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
380 dri3_free_render_buffer(draw, buf);
381 draw->buffers[b] = NULL;
382 }
383 break;
384 }
385 }
386 break;
387 }
388 }
389 free(ge);
390 }
391
392 static bool
393 dri3_wait_for_event(struct loader_dri3_drawable *draw)
394 {
395 xcb_generic_event_t *ev;
396 xcb_present_generic_event_t *ge;
397
398 xcb_flush(draw->conn);
399 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
400 if (!ev)
401 return false;
402 ge = (void *) ev;
403 dri3_handle_present_event(draw, ge);
404 return true;
405 }
406
407 /** loader_dri3_wait_for_msc
408 *
409 * Get the X server to send an event when the target msc/divisor/remainder is
410 * reached.
411 */
412 bool
413 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
414 int64_t target_msc,
415 int64_t divisor, int64_t remainder,
416 int64_t *ust, int64_t *msc, int64_t *sbc)
417 {
418 uint32_t msc_serial;
419
420 msc_serial = ++draw->send_msc_serial;
421 xcb_present_notify_msc(draw->conn,
422 draw->drawable,
423 msc_serial,
424 target_msc,
425 divisor,
426 remainder);
427
428 xcb_flush(draw->conn);
429
430 /* Wait for the event */
431 if (draw->special_event) {
432 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
433 if (!dri3_wait_for_event(draw))
434 return false;
435 }
436 }
437
438 *ust = draw->notify_ust;
439 *msc = draw->notify_msc;
440 *sbc = draw->recv_sbc;
441
442 return true;
443 }
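/* Illustrative usage sketch (not part of the original file): a caller that
 * wants to block until the MSC following the last known one could use the
 * helper like this, assuming "draw" is an initialized loader_dri3_drawable:
 *
 *    int64_t ust, msc, sbc;
 *
 *    if (!loader_dri3_wait_for_msc(draw, draw->msc + 1, 0, 0,
 *                                  &ust, &msc, &sbc))
 *       return false;   // lost the special event queue or the connection
 *
 * The GLX front end, for example, builds its glXWaitForMscOML()
 * implementation on top of this helper.
 */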
444
445 /** loader_dri3_wait_for_sbc
446 *
447 * Wait for the completed swap buffer count to reach the specified
448 * target. Presumably the application knows that this will be reached with
449 * outstanding complete events, or we're going to be here awhile.
450 */
451 int
452 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
453 int64_t target_sbc, int64_t *ust,
454 int64_t *msc, int64_t *sbc)
455 {
456 /* From the GLX_OML_sync_control spec:
457 *
458 * "If <target_sbc> = 0, the function will block until all previous
459 * swaps requested with glXSwapBuffersMscOML for that window have
460 * completed."
461 */
462 if (!target_sbc)
463 target_sbc = draw->send_sbc;
464
465 while (draw->recv_sbc < target_sbc) {
466 if (!dri3_wait_for_event(draw))
467 return 0;
468 }
469
470 *ust = draw->ust;
471 *msc = draw->msc;
472 *sbc = draw->recv_sbc;
473 return 1;
474 }
475
476 /** dri3_find_back
477 *
478 * Find an idle back buffer. If there isn't one, then
479 * wait for a present idle notify event from the X server
480 */
481 static int
482 dri3_find_back(struct loader_dri3_drawable *draw)
483 {
484 int b;
485 xcb_generic_event_t *ev;
486 xcb_present_generic_event_t *ge;
487
488 /* Increase the likelihood of reusing the current buffer */
489 dri3_flush_present_events(draw);
490
491 for (;;) {
492 for (b = 0; b < draw->num_back; b++) {
493 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
494 struct loader_dri3_buffer *buffer = draw->buffers[id];
495
496 if (!buffer || !buffer->busy) {
497 draw->cur_back = id;
498 return id;
499 }
500 }
501 xcb_flush(draw->conn);
502 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
503 if (!ev)
504 return -1;
505 ge = (void *) ev;
506 dri3_handle_present_event(draw, ge);
507 }
508 }
509
510 static xcb_gcontext_t
511 dri3_drawable_gc(struct loader_dri3_drawable *draw)
512 {
513 if (!draw->gc) {
514 uint32_t v = 0;
515 xcb_create_gc(draw->conn,
516 (draw->gc = xcb_generate_id(draw->conn)),
517 draw->drawable,
518 XCB_GC_GRAPHICS_EXPOSURES,
519 &v);
520 }
521 return draw->gc;
522 }
523
524
525 static struct loader_dri3_buffer *
526 dri3_back_buffer(struct loader_dri3_drawable *draw)
527 {
528 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
529 }
530
531 static struct loader_dri3_buffer *
532 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
533 {
534 return draw->buffers[LOADER_DRI3_FRONT_ID];
535 }
536
537 static void
538 dri3_copy_area(xcb_connection_t *c,
539 xcb_drawable_t src_drawable,
540 xcb_drawable_t dst_drawable,
541 xcb_gcontext_t gc,
542 int16_t src_x,
543 int16_t src_y,
544 int16_t dst_x,
545 int16_t dst_y,
546 uint16_t width,
547 uint16_t height)
548 {
549 xcb_void_cookie_t cookie;
550
551 cookie = xcb_copy_area_checked(c,
552 src_drawable,
553 dst_drawable,
554 gc,
555 src_x,
556 src_y,
557 dst_x,
558 dst_y,
559 width,
560 height);
561 xcb_discard_reply(c, cookie.sequence);
562 }
563
564 /**
565 * Asks the driver to flush any queued work necessary for serializing with the
566 * X command stream, and optionally the slightly more strict requirement of
567 * glFlush() equivalence (which would require flushing even if nothing had
568 * been drawn to a window system framebuffer, for example).
569 */
570 void
571 loader_dri3_flush(struct loader_dri3_drawable *draw,
572 unsigned flags,
573 enum __DRI2throttleReason throttle_reason)
574 {
575 /* NEED TO CHECK WHETHER CONTEXT IS NULL */
576 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
577
578 if (dri_context) {
579 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
580 flags, throttle_reason);
581 }
582 }
583
584 void
585 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
586 int x, int y,
587 int width, int height,
588 bool flush)
589 {
590 struct loader_dri3_buffer *back;
591 unsigned flags = __DRI2_FLUSH_DRAWABLE;
592
593 /* Check we have the right attachments */
594 if (!draw->have_back || draw->is_pixmap)
595 return;
596
597 if (flush)
598 flags |= __DRI2_FLUSH_CONTEXT;
599 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
600
601 back = dri3_back_buffer(draw);
602 y = draw->height - y - height;
603
604 if (draw->is_different_gpu) {
605 /* Update the linear buffer part of the back buffer
606 * for the dri3_copy_area operation
607 */
608 (void) loader_dri3_blit_image(draw,
609 back->linear_buffer,
610 back->image,
611 0, 0, back->width, back->height,
612 0, 0, __BLIT_FLAG_FLUSH);
613 /* We use blit_image to update our fake front.
614 */
615 if (draw->have_fake_front)
616 (void) loader_dri3_blit_image(draw,
617 dri3_fake_front_buffer(draw)->image,
618 back->image,
619 x, y, width, height,
620 x, y, __BLIT_FLAG_FLUSH);
621 }
622
623 loader_dri3_swapbuffer_barrier(draw);
624 dri3_fence_reset(draw->conn, back);
625 dri3_copy_area(draw->conn,
626 dri3_back_buffer(draw)->pixmap,
627 draw->drawable,
628 dri3_drawable_gc(draw),
629 x, y, x, y, width, height);
630 dri3_fence_trigger(draw->conn, back);
631 /* Refresh the fake front (if present) after we just damaged the real
632 * front.
633 */
634 if (draw->have_fake_front && !draw->is_different_gpu) {
635 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
636 dri3_copy_area(draw->conn,
637 dri3_back_buffer(draw)->pixmap,
638 dri3_fake_front_buffer(draw)->pixmap,
639 dri3_drawable_gc(draw),
640 x, y, x, y, width, height);
641 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
642 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
643 }
644 dri3_fence_await(draw->conn, back);
645 }
646
647 void
648 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
649 xcb_drawable_t dest,
650 xcb_drawable_t src)
651 {
652 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
653
654 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
655 dri3_copy_area(draw->conn,
656 src, dest,
657 dri3_drawable_gc(draw),
658 0, 0, 0, 0, draw->width, draw->height);
659 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
660 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
661 }
662
663 void
664 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
665 {
666 struct loader_dri3_buffer *front;
667
668 if (draw == NULL || !draw->have_fake_front)
669 return;
670
671 front = dri3_fake_front_buffer(draw);
672
673 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
674
675 /* In the draw->is_different_gpu case, the linear buffer has been updated,
676 * but not yet the tiled buffer.
677 * Copy back to the tiled buffer we use for rendering.
678 * Note that we don't need flushing.
679 */
680 if (draw->is_different_gpu)
681 (void) loader_dri3_blit_image(draw,
682 front->image,
683 front->linear_buffer,
684 0, 0, front->width, front->height,
685 0, 0, 0);
686 }
687
688 void
689 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
690 {
691 struct loader_dri3_buffer *front;
692
693 if (draw == NULL || !draw->have_fake_front)
694 return;
695
696 front = dri3_fake_front_buffer(draw);
697
698 /* In the draw->is_different_gpu case, we update the linear_buffer
699 * before updating the real front.
700 */
701 if (draw->is_different_gpu)
702 (void) loader_dri3_blit_image(draw,
703 front->linear_buffer,
704 front->image,
705 0, 0, front->width, front->height,
706 0, 0, __BLIT_FLAG_FLUSH);
707 loader_dri3_swapbuffer_barrier(draw);
708 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
709 }
710
711 /** dri3_flush_present_events
712 *
713 * Process any present events that have been received from the X server
714 */
715 static void
716 dri3_flush_present_events(struct loader_dri3_drawable *draw)
717 {
718 /* Check to see if any configuration changes have occurred
719 * since we were last invoked
720 */
721 if (draw->special_event) {
722 xcb_generic_event_t *ev;
723
724 while ((ev = xcb_poll_for_special_event(draw->conn,
725 draw->special_event)) != NULL) {
726 xcb_present_generic_event_t *ge = (void *) ev;
727 dri3_handle_present_event(draw, ge);
728 }
729 }
730 }
731
732 /** loader_dri3_swap_buffers_msc
733 *
734 * Make the current back buffer visible using the present extension
735 */
736 int64_t
737 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
738 int64_t target_msc, int64_t divisor,
739 int64_t remainder, unsigned flush_flags,
740 bool force_copy)
741 {
742 struct loader_dri3_buffer *back;
743 int64_t ret = 0;
744 uint32_t options = XCB_PRESENT_OPTION_NONE;
745
746 draw->vtable->flush_drawable(draw, flush_flags);
747
748 back = draw->buffers[dri3_find_back(draw)];
749 if (draw->is_different_gpu && back) {
750 /* Update the linear buffer before presenting the pixmap */
751 (void) loader_dri3_blit_image(draw,
752 back->linear_buffer,
753 back->image,
754 0, 0, back->width, back->height,
755 0, 0, __BLIT_FLAG_FLUSH);
756 /* Update the fake front */
757 if (draw->have_fake_front)
758 (void) loader_dri3_blit_image(draw,
759 draw->buffers[LOADER_DRI3_FRONT_ID]->image,
760 back->image,
761 0, 0, draw->width, draw->height,
762 0, 0, __BLIT_FLAG_FLUSH);
763 }
764
765 dri3_flush_present_events(draw);
766
767 if (back && !draw->is_pixmap) {
768 dri3_fence_reset(draw->conn, back);
769
770 /* Compute when we want the frame shown by taking the last known
771 * successful MSC and adding in a swap interval for each outstanding swap
772 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
773 * semantics"
774 */
775 ++draw->send_sbc;
776 if (target_msc == 0 && divisor == 0 && remainder == 0)
777 target_msc = draw->msc + draw->swap_interval *
778 (draw->send_sbc - draw->recv_sbc);
779 else if (divisor == 0 && remainder > 0) {
780 /* From the GLX_OML_sync_control spec:
781 * "If <divisor> = 0, the swap will occur when MSC becomes
782 * greater than or equal to <target_msc>."
783 *
784 * Note that there's no mention of the remainder. The Present
785 * extension throws BadValue for remainder != 0 with divisor == 0, so
786 * just drop the passed in value.
787 */
788 remainder = 0;
789 }
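      /* Worked example (added for illustration): with swap_interval == 1,
       * the last completed MSC at 100, recv_sbc == 40 and send_sbc now 42
       * (this swap plus one still in flight), the default branch above asks
       * for target_msc = 100 + 1 * (42 - 40) = 102, i.e. one frame after
       * the outstanding swap is expected to complete.
       */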
790
791 /* From the GLX_EXT_swap_control spec
792 * and the EGL 1.4 spec (page 53):
793 *
794 * "If <interval> is set to a value of 0, buffer swaps are not
795 * synchronized to a video frame."
796 *
797 * Implementation note: It is possible to enable triple buffering
798 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
799 * the default.
800 */
801 if (draw->swap_interval == 0)
802 options |= XCB_PRESENT_OPTION_ASYNC;
803 if (force_copy)
804 options |= XCB_PRESENT_OPTION_COPY;
805
806 back->busy = 1;
807 back->last_swap = draw->send_sbc;
808 xcb_present_pixmap(draw->conn,
809 draw->drawable,
810 back->pixmap,
811 (uint32_t) draw->send_sbc,
812 0, /* valid */
813 0, /* update */
814 0, /* x_off */
815 0, /* y_off */
816 None, /* target_crtc */
817 None,
818 back->sync_fence,
819 options,
820 target_msc,
821 divisor,
822 remainder, 0, NULL);
823 ret = (int64_t) draw->send_sbc;
824
825 /* If there's a fake front, then copy the source back buffer
826 * to the fake front to keep it up to date. This needs
827 * to reset the fence and make future users block until
828 * the X server is done copying the bits
829 */
830 if (draw->have_fake_front && !draw->is_different_gpu) {
831 dri3_fence_reset(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
832 dri3_copy_area(draw->conn,
833 back->pixmap,
834 draw->buffers[LOADER_DRI3_FRONT_ID]->pixmap,
835 dri3_drawable_gc(draw),
836 0, 0, 0, 0,
837 draw->width, draw->height);
838 dri3_fence_trigger(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
839 }
840 xcb_flush(draw->conn);
841 if (draw->stamp)
842 ++(*draw->stamp);
843 }
844
845 draw->ext->flush->invalidate(draw->dri_drawable);
846
847 return ret;
848 }
849
850 int
851 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
852 {
853 int back_id = LOADER_DRI3_BACK_ID(dri3_find_back(draw));
854
855 if (back_id < 0 || !draw->buffers[back_id])
856 return 0;
857
858 if (draw->buffers[back_id]->last_swap != 0)
859 return draw->send_sbc - draw->buffers[back_id]->last_swap + 1;
860 else
861 return 0;
862 }
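/* Note (added for illustration): the age follows the EGL_EXT_buffer_age /
 * GLX_EXT_buffer_age convention, where an age of 1 means the buffer still
 * holds the frame submitted by the most recent swap.  For example, if the
 * chosen back buffer was last presented at sbc 8 and draw->send_sbc is now
 * 10, two other swaps have been submitted since, so its age is
 * 10 - 8 + 1 = 3.
 */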
863
864 /** loader_dri3_open
865 *
866 * Wrapper around xcb_dri3_open
867 */
868 int
869 loader_dri3_open(xcb_connection_t *conn,
870 xcb_window_t root,
871 uint32_t provider)
872 {
873 xcb_dri3_open_cookie_t cookie;
874 xcb_dri3_open_reply_t *reply;
875 int fd;
876
877 cookie = xcb_dri3_open(conn,
878 root,
879 provider);
880
881 reply = xcb_dri3_open_reply(conn, cookie, NULL);
882 if (!reply)
883 return -1;
884
885 if (reply->nfd != 1) {
886 free(reply);
887 return -1;
888 }
889
890 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
891 free(reply);
892 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
893
894 return fd;
895 }
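/* Illustrative usage sketch (not part of the original file): opening a DRM
 * file descriptor for the first X screen.  The root-window lookup is plain
 * XCB, passing 0 (None) as the provider lets the server pick a default, and
 * error handling is abbreviated:
 *
 *    int screen_num;
 *    xcb_connection_t *conn = xcb_connect(NULL, &screen_num);
 *    xcb_screen_t *screen =
 *       xcb_setup_roots_iterator(xcb_get_setup(conn)).data;
 *    int fd = loader_dri3_open(conn, screen->root, 0);
 *
 *    if (fd < 0) {
 *       // DRI3 is not available on this server
 *    }
 */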
896
897 static uint32_t
898 dri3_cpp_for_format(uint32_t format) {
899 switch (format) {
900 case __DRI_IMAGE_FORMAT_R8:
901 return 1;
902 case __DRI_IMAGE_FORMAT_RGB565:
903 case __DRI_IMAGE_FORMAT_GR88:
904 return 2;
905 case __DRI_IMAGE_FORMAT_XRGB8888:
906 case __DRI_IMAGE_FORMAT_ARGB8888:
907 case __DRI_IMAGE_FORMAT_ABGR8888:
908 case __DRI_IMAGE_FORMAT_XBGR8888:
909 case __DRI_IMAGE_FORMAT_XRGB2101010:
910 case __DRI_IMAGE_FORMAT_ARGB2101010:
911 case __DRI_IMAGE_FORMAT_SARGB8:
912 return 4;
913 case __DRI_IMAGE_FORMAT_NONE:
914 default:
915 return 0;
916 }
917 }
918
919 /** dri3_alloc_render_buffer
920 *
921 * Use the driver createImage function to construct a __DRIimage, then
922 * get a file descriptor for that and create an X pixmap from that
923 *
924 * Allocate an xshmfence for synchronization
925 */
926 static struct loader_dri3_buffer *
927 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
928 int width, int height, int depth)
929 {
930 struct loader_dri3_buffer *buffer;
931 __DRIimage *pixmap_buffer;
932 xcb_pixmap_t pixmap;
933 xcb_sync_fence_t sync_fence;
934 struct xshmfence *shm_fence;
935 int buffer_fd, fence_fd;
936 int stride;
937
938 /* Create an xshmfence object and
939 * prepare to send that to the X server
940 */
941
942 fence_fd = xshmfence_alloc_shm();
943 if (fence_fd < 0)
944 return NULL;
945
946 shm_fence = xshmfence_map_shm(fence_fd);
947 if (shm_fence == NULL)
948 goto no_shm_fence;
949
950 /* Allocate the image from the driver
951 */
952 buffer = calloc(1, sizeof *buffer);
953 if (!buffer)
954 goto no_buffer;
955
956 buffer->cpp = dri3_cpp_for_format(format);
957 if (!buffer->cpp)
958 goto no_image;
959
960 if (!draw->is_different_gpu) {
961 buffer->image = draw->ext->image->createImage(draw->dri_screen,
962 width, height,
963 format,
964 __DRI_IMAGE_USE_SHARE |
965 __DRI_IMAGE_USE_SCANOUT |
966 __DRI_IMAGE_USE_BACKBUFFER,
967 buffer);
968 pixmap_buffer = buffer->image;
969
970 if (!buffer->image)
971 goto no_image;
972 } else {
973 buffer->image = draw->ext->image->createImage(draw->dri_screen,
974 width, height,
975 format,
976 0,
977 buffer);
978
979 if (!buffer->image)
980 goto no_image;
981
982 buffer->linear_buffer =
983 draw->ext->image->createImage(draw->dri_screen,
984 width, height, format,
985 __DRI_IMAGE_USE_SHARE |
986 __DRI_IMAGE_USE_LINEAR |
987 __DRI_IMAGE_USE_BACKBUFFER,
988 buffer);
989 pixmap_buffer = buffer->linear_buffer;
990
991 if (!buffer->linear_buffer)
992 goto no_linear_buffer;
993 }
994
995 /* X wants the stride, so ask the image for it
996 */
997 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
998 &stride))
999 goto no_buffer_attrib;
1000
1001 buffer->pitch = stride;
1002
1003 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
1004 &buffer_fd))
1005 goto no_buffer_attrib;
1006
1007 xcb_dri3_pixmap_from_buffer(draw->conn,
1008 (pixmap = xcb_generate_id(draw->conn)),
1009 draw->drawable,
1010 buffer->size,
1011 width, height, buffer->pitch,
1012 depth, buffer->cpp * 8,
1013 buffer_fd);
1014
1015 xcb_dri3_fence_from_fd(draw->conn,
1016 pixmap,
1017 (sync_fence = xcb_generate_id(draw->conn)),
1018 false,
1019 fence_fd);
1020
1021 buffer->pixmap = pixmap;
1022 buffer->own_pixmap = true;
1023 buffer->sync_fence = sync_fence;
1024 buffer->shm_fence = shm_fence;
1025 buffer->width = width;
1026 buffer->height = height;
1027
1028 /* Mark the buffer as idle
1029 */
1030 dri3_fence_set(buffer);
1031
1032 return buffer;
1033
1034 no_buffer_attrib:
1035 draw->ext->image->destroyImage(pixmap_buffer);
1036 no_linear_buffer:
1037 if (draw->is_different_gpu)
1038 draw->ext->image->destroyImage(buffer->image);
1039 no_image:
1040 free(buffer);
1041 no_buffer:
1042 xshmfence_unmap_shm(shm_fence);
1043 no_shm_fence:
1044 close(fence_fd);
1045 return NULL;
1046 }
1047
1048 /** dri3_update_drawable
1049 *
1050 * Called the first time we use the drawable and then
1051 * after we receive present configure notify events to
1052 * track the geometry of the drawable
1053 */
1054 static int
1055 dri3_update_drawable(__DRIdrawable *driDrawable,
1056 struct loader_dri3_drawable *draw)
1057 {
1058 if (draw->first_init) {
1059 xcb_get_geometry_cookie_t geom_cookie;
1060 xcb_get_geometry_reply_t *geom_reply;
1061 xcb_void_cookie_t cookie;
1062 xcb_generic_error_t *error;
1063 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1064 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1065
1066 draw->first_init = false;
1067
1068 /* Try to select for input on the window.
1069 *
1070 * If the drawable is a window, this will get our events
1071 * delivered.
1072 *
1073 * Otherwise, we'll get a BadWindow error back from this request which
1074 * will let us know that the drawable is a pixmap instead.
1075 */
1076
1077 draw->eid = xcb_generate_id(draw->conn);
1078 cookie =
1079 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1080 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1081 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1082 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1083
1084 present_capabilities_cookie =
1085 xcb_present_query_capabilities(draw->conn, draw->drawable);
1086
1087 /* Create an XCB event queue to hold present events outside of the usual
1088 * application event queue
1089 */
1090 draw->special_event = xcb_register_for_special_xge(draw->conn,
1091 &xcb_present_id,
1092 draw->eid,
1093 draw->stamp);
1094 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1095
1096 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1097
1098 if (!geom_reply)
1099 return false;
1100
1101 draw->width = geom_reply->width;
1102 draw->height = geom_reply->height;
1103 draw->depth = geom_reply->depth;
1104 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1105
1106 free(geom_reply);
1107
1108 draw->is_pixmap = false;
1109
1110 /* Check to see if our select input call failed. If it failed with a
1111 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1112 * special event queue created above and mark the drawable as a pixmap
1113 */
1114
1115 error = xcb_request_check(draw->conn, cookie);
1116
1117 present_capabilities_reply =
1118 xcb_present_query_capabilities_reply(draw->conn,
1119 present_capabilities_cookie,
1120 NULL);
1121
1122 if (present_capabilities_reply) {
1123 draw->present_capabilities = present_capabilities_reply->capabilities;
1124 free(present_capabilities_reply);
1125 } else
1126 draw->present_capabilities = 0;
1127
1128 if (error) {
1129 if (error->error_code != BadWindow) {
1130 free(error);
1131 return false;
1132 }
1133 draw->is_pixmap = true;
1134 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1135 draw->special_event = NULL;
1136 }
1137 }
1138 dri3_flush_present_events(draw);
1139 return true;
1140 }
1141
1142 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1143 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1144 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1145 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1146 */
1147 static int
1148 image_format_to_fourcc(int format)
1149 {
1150
1151 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1152 switch (format) {
1153 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1154 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1155 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1156 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1157 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1158 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1159 }
1160 return 0;
1161 }
1162
1163 __DRIimage *
1164 loader_dri3_create_image(xcb_connection_t *c,
1165 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1166 unsigned int format,
1167 __DRIscreen *dri_screen,
1168 const __DRIimageExtension *image,
1169 void *loaderPrivate)
1170 {
1171 int *fds;
1172 __DRIimage *image_planar, *ret;
1173 int stride, offset;
1174
1175 /* Get an FD for the pixmap object
1176 */
1177 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1178
1179 stride = bp_reply->stride;
1180 offset = 0;
1181
1182 /* createImageFromFds creates a wrapper __DRIimage structure which
1183 * can deal with multiple planes for things like YUV images. So, once
1184 * we've gotten the planar wrapper, pull the single plane out of it and
1185 * discard the wrapper.
1186 */
1187 image_planar = image->createImageFromFds(dri_screen,
1188 bp_reply->width,
1189 bp_reply->height,
1190 image_format_to_fourcc(format),
1191 fds, 1,
1192 &stride, &offset, loaderPrivate);
1193 close(fds[0]);
1194 if (!image_planar)
1195 return NULL;
1196
1197 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1198
1199 image->destroyImage(image_planar);
1200
1201 return ret;
1202 }
1203
1204 /** dri3_get_pixmap_buffer
1205 *
1206 * Get the DRM object for a pixmap from the X server and
1207 * wrap that with a __DRIimage structure using createImageFromFds
1208 */
1209 static struct loader_dri3_buffer *
1210 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1211 enum loader_dri3_buffer_type buffer_type,
1212 struct loader_dri3_drawable *draw)
1213 {
1214 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1215 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1216 xcb_drawable_t pixmap;
1217 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1218 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1219 xcb_sync_fence_t sync_fence;
1220 struct xshmfence *shm_fence;
1221 int fence_fd;
1222
1223 if (buffer)
1224 return buffer;
1225
1226 pixmap = draw->drawable;
1227
1228 buffer = calloc(1, sizeof *buffer);
1229 if (!buffer)
1230 goto no_buffer;
1231
1232 fence_fd = xshmfence_alloc_shm();
1233 if (fence_fd < 0)
1234 goto no_fence;
1235 shm_fence = xshmfence_map_shm(fence_fd);
1236 if (shm_fence == NULL) {
1237 close (fence_fd);
1238 goto no_fence;
1239 }
1240
1241 xcb_dri3_fence_from_fd(draw->conn,
1242 pixmap,
1243 (sync_fence = xcb_generate_id(draw->conn)),
1244 false,
1245 fence_fd);
1246
1247 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1248 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1249 if (!bp_reply)
1250 goto no_image;
1251
1252 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1253 draw->dri_screen, draw->ext->image,
1254 buffer);
1255 if (!buffer->image)
1256 goto no_image;
1257
1258 buffer->pixmap = pixmap;
1259 buffer->own_pixmap = false;
1260 buffer->width = bp_reply->width;
1261 buffer->height = bp_reply->height;
1262 buffer->buffer_type = buffer_type;
1263 buffer->shm_fence = shm_fence;
1264 buffer->sync_fence = sync_fence;
1265
1266 draw->buffers[buf_id] = buffer;
1267
1268 free(bp_reply);
1269
1270 return buffer;
1271
1272 no_image:
1273 free(bp_reply);
1274 xcb_sync_destroy_fence(draw->conn, sync_fence);
1275 xshmfence_unmap_shm(shm_fence);
1276 no_fence:
1277 free(buffer);
1278 no_buffer:
1279 return NULL;
1280 }
1281
1282 /** dri3_get_buffer
1283 *
1284 * Find a front or back buffer, allocating new ones as necessary
1285 */
1286 static struct loader_dri3_buffer *
1287 dri3_get_buffer(__DRIdrawable *driDrawable,
1288 unsigned int format,
1289 enum loader_dri3_buffer_type buffer_type,
1290 struct loader_dri3_drawable *draw)
1291 {
1292 struct loader_dri3_buffer *buffer;
1293 int buf_id;
1294
1295 if (buffer_type == loader_dri3_buffer_back) {
1296 buf_id = dri3_find_back(draw);
1297
1298 if (buf_id < 0)
1299 return NULL;
1300 } else {
1301 buf_id = LOADER_DRI3_FRONT_ID;
1302 }
1303
1304 buffer = draw->buffers[buf_id];
1305
1306 /* Allocate a new buffer if there isn't an old one, or if that
1307 * old one is the wrong size
1308 */
1309 if (!buffer || buffer->width != draw->width ||
1310 buffer->height != draw->height) {
1311 struct loader_dri3_buffer *new_buffer;
1312
1313 /* Allocate the new buffer
1314 */
1315 new_buffer = dri3_alloc_render_buffer(draw,
1316 format,
1317 draw->width,
1318 draw->height,
1319 draw->depth);
1320 if (!new_buffer)
1321 return NULL;
1322
1323 /* When resizing, copy the contents of the old buffer, waiting for that
1324 * copy to complete using our fences before proceeding
1325 */
1326 switch (buffer_type) {
1327 case loader_dri3_buffer_back:
1328 if (buffer) {
1329 if (!buffer->linear_buffer) {
1330 dri3_fence_reset(draw->conn, new_buffer);
1331 dri3_fence_await(draw->conn, buffer);
1332 dri3_copy_area(draw->conn,
1333 buffer->pixmap,
1334 new_buffer->pixmap,
1335 dri3_drawable_gc(draw),
1336 0, 0, 0, 0,
1337 draw->width, draw->height);
1338 dri3_fence_trigger(draw->conn, new_buffer);
1339 } else if (draw->vtable->in_current_context(draw)) {
1340 (void) loader_dri3_blit_image(draw,
1341 new_buffer->image,
1342 buffer->image,
1343 0, 0, draw->width, draw->height,
1344 0, 0, 0);
1345 }
1346 dri3_free_render_buffer(draw, buffer);
1347 }
1348 break;
1349 case loader_dri3_buffer_front:
1350 loader_dri3_swapbuffer_barrier(draw);
1351 dri3_fence_reset(draw->conn, new_buffer);
1352 dri3_copy_area(draw->conn,
1353 draw->drawable,
1354 new_buffer->pixmap,
1355 dri3_drawable_gc(draw),
1356 0, 0, 0, 0,
1357 draw->width, draw->height);
1358 dri3_fence_trigger(draw->conn, new_buffer);
1359
1360 if (new_buffer->linear_buffer &&
1361 draw->vtable->in_current_context(draw)) {
1362 dri3_fence_await(draw->conn, new_buffer);
1363 (void) loader_dri3_blit_image(draw,
1364 new_buffer->image,
1365 new_buffer->linear_buffer,
1366 0, 0, draw->width, draw->height,
1367 0, 0, 0);
1368 }
1369 break;
1370 }
1371 buffer = new_buffer;
1372 buffer->buffer_type = buffer_type;
1373 draw->buffers[buf_id] = buffer;
1374 }
1375 dri3_fence_await(draw->conn, buffer);
1376
1377 /* Return the requested buffer */
1378 return buffer;
1379 }
1380
1381 /** dri3_free_buffers
1382 *
1383 * Free the front buffer or all of the back buffers. Used
1384 * when the application changes which buffers it needs
1385 */
1386 static void
1387 dri3_free_buffers(__DRIdrawable *driDrawable,
1388 enum loader_dri3_buffer_type buffer_type,
1389 struct loader_dri3_drawable *draw)
1390 {
1391 struct loader_dri3_buffer *buffer;
1392 int first_id;
1393 int n_id;
1394 int buf_id;
1395
1396 switch (buffer_type) {
1397 case loader_dri3_buffer_back:
1398 first_id = LOADER_DRI3_BACK_ID(0);
1399 n_id = LOADER_DRI3_MAX_BACK;
1400 break;
1401 case loader_dri3_buffer_front:
1402 first_id = LOADER_DRI3_FRONT_ID;
1403 n_id = 1;
1404 }
1405
1406 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1407 buffer = draw->buffers[buf_id];
1408 if (buffer) {
1409 dri3_free_render_buffer(draw, buffer);
1410 draw->buffers[buf_id] = NULL;
1411 }
1412 }
1413 }
1414
1415 /** loader_dri3_get_buffers
1416 *
1417 * The published buffer allocation API.
1418 * Returns all of the necessary buffers, allocating
1419 * as needed.
1420 */
1421 int
1422 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1423 unsigned int format,
1424 uint32_t *stamp,
1425 void *loaderPrivate,
1426 uint32_t buffer_mask,
1427 struct __DRIimageList *buffers)
1428 {
1429 struct loader_dri3_drawable *draw = loaderPrivate;
1430 struct loader_dri3_buffer *front, *back;
1431
1432 buffers->image_mask = 0;
1433 buffers->front = NULL;
1434 buffers->back = NULL;
1435
1436 front = NULL;
1437 back = NULL;
1438
1439 if (!dri3_update_drawable(driDrawable, draw))
1440 return false;
1441
1442 /* pixmaps always have front buffers */
1443 if (draw->is_pixmap)
1444 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1445
1446 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1447 /* All pixmaps are owned by the server gpu.
1448 * When we use a different gpu, we can't use the pixmap
1449 * as a buffer since it is potentially tiled in a way
1450 * our device can't understand. In this case, use
1451 * a fake front buffer. Hopefully the pixmap
1452 * content will get synced with the fake front
1453 * buffer.
1454 */
1455 if (draw->is_pixmap && !draw->is_different_gpu)
1456 front = dri3_get_pixmap_buffer(driDrawable,
1457 format,
1458 loader_dri3_buffer_front,
1459 draw);
1460 else
1461 front = dri3_get_buffer(driDrawable,
1462 format,
1463 loader_dri3_buffer_front,
1464 draw);
1465
1466 if (!front)
1467 return false;
1468 } else {
1469 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1470 draw->have_fake_front = 0;
1471 }
1472
1473 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1474 back = dri3_get_buffer(driDrawable,
1475 format,
1476 loader_dri3_buffer_back,
1477 draw);
1478 if (!back)
1479 return false;
1480 draw->have_back = 1;
1481 } else {
1482 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1483 draw->have_back = 0;
1484 }
1485
1486 if (front) {
1487 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1488 buffers->front = front->image;
1489 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1490 }
1491
1492 if (back) {
1493 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1494 buffers->back = back->image;
1495 }
1496
1497 draw->stamp = stamp;
1498
1499 return true;
1500 }
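/* Illustrative sketch (not part of the original file) of how a window-system
 * binding might expose this entry point to the driver through the image
 * loader extension.  The example assumes, as loader_dri3_get_buffers itself
 * does, that loaderPrivate points at the loader_dri3_drawable; the
 * flush-front callback shown is only a hypothetical placeholder:
 *
 *    static void
 *    example_flush_front_buffer(__DRIdrawable *driDrawable,
 *                               void *loaderPrivate)
 *    {
 *       loader_dri3_flush(loaderPrivate, __DRI2_FLUSH_DRAWABLE,
 *                         __DRI2_THROTTLE_FLUSHFRONT);
 *    }
 *
 *    static const __DRIimageLoaderExtension example_image_loader = {
 *       .base = { __DRI_IMAGE_LOADER, 1 },
 *       .getBuffers = loader_dri3_get_buffers,
 *       .flushFrontBuffer = example_flush_front_buffer,
 *    };
 */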
1501
1502 /** loader_dri3_update_drawable_geometry
1503 *
1504 * Get the current drawable geometry.
1505 */
1506 void
1507 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1508 {
1509 xcb_get_geometry_cookie_t geom_cookie;
1510 xcb_get_geometry_reply_t *geom_reply;
1511
1512 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1513
1514 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1515
1516 if (geom_reply) {
1517 draw->width = geom_reply->width;
1518 draw->height = geom_reply->height;
1519 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1520
1521 free(geom_reply);
1522 }
1523 }
1524
1525
1526 /**
1527 * Make sure the server has flushed all pending swap buffers to hardware
1528 * for this drawable. Ideally we'd want to send an X protocol request to
1529 * have the server block our connection until the swaps are complete. That
1530 * would avoid the potential round-trip here.
1531 */
1532 void
1533 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1534 {
1535 int64_t ust, msc, sbc;
1536
1537 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1538 }
1539
1540 /**
1541 * Perform any cleanup associated with a close screen operation.
1542 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1543 *
1544 * This function destroys the screen's cached blit context, if any.
1545 */
1546 void
1547 loader_dri3_close_screen(__DRIscreen *dri_screen)
1548 {
1549 mtx_lock(&blit_context.mtx);
1550 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1551 blit_context.core->destroyContext(blit_context.ctx);
1552 blit_context.ctx = NULL;
1553 }
1554 mtx_unlock(&blit_context.mtx);
1555 }