loader/dri3: Make sure we invalidate a drawable on size change
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include <c11/threads.h>
36 #include "loader_dri3_helper.h"
37
38 /* From xmlpool/options.h, user exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
43
/**
 * A cached blit context.
 *
 * Lazily-created __DRIcontext shared by all threads for blitImage calls
 * made without a suitable current context.  All fields are protected by
 * @mtx, which loader_dri3_blit_context_get() leaves held until
 * loader_dri3_blit_context_put() is called.
 */
struct loader_dri3_blit_context {
   mtx_t mtx;                      /* Guards the fields below. */
   __DRIcontext *ctx;              /* Cached context; NULL until first use. */
   __DRIscreen *cur_screen;        /* Screen the cached context belongs to. */
   const __DRIcoreExtension *core; /* Core extension used to destroy ctx. */
};

/* For simplicity we maintain the cache only for a single screen at a time */
static struct loader_dri3_blit_context blit_context = {
   _MTX_INITIALIZER_NP, NULL
};
58
59 static void
60 dri3_flush_present_events(struct loader_dri3_drawable *draw);
61
62 static struct loader_dri3_buffer *
63 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
64
65 /**
66 * Do we have blit functionality in the image blit extension?
67 *
68 * \param draw[in] The drawable intended to blit from / to.
69 * \return true if we have blit functionality. false otherwise.
70 */
71 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
72 {
73 return draw->ext->image->base.version >= 9 &&
74 draw->ext->image->blitImage != NULL;
75 }
76
/**
 * Get and lock (for use with the current thread) a dri context associated
 * with the drawable's dri screen. The context is intended to be used with
 * the dri image extension's blitImage method.
 *
 * \param draw[in] Pointer to the drawable whose dri screen we want a
 * dri context for.
 * \return A dri context or NULL if context creation failed.
 *
 * When the caller is done with the context (even if the context returned was
 * NULL), the caller must call loader_dri3_blit_context_put.
 */
static __DRIcontext *
loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
{
   mtx_lock(&blit_context.mtx);

   /* The cache holds a context for one screen only; drop a cached context
    * that belongs to a different screen.
    */
   if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
      blit_context.core->destroyContext(blit_context.ctx);
      blit_context.ctx = NULL;
   }

   if (!blit_context.ctx) {
      blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
                                                           NULL, NULL, NULL);
      blit_context.cur_screen = draw->dri_screen;
      blit_context.core = draw->ext->core;
   }

   /* The mutex is deliberately still held here — even when returning NULL —
    * and is released by loader_dri3_blit_context_put().
    */
   return blit_context.ctx;
}
108
/**
 * Release (for use with other threads) a dri context previously obtained using
 * loader_dri3_blit_context_get.
 *
 * Unlocks the blit-context mutex taken by loader_dri3_blit_context_get();
 * must be called exactly once per _get(), even if _get() returned NULL.
 */
static void
loader_dri3_blit_context_put(void)
{
   mtx_unlock(&blit_context.mtx);
}
118
119 /**
120 * Blit (parts of) the contents of a DRI image to another dri image
121 *
122 * \param draw[in] The drawable which owns the images.
123 * \param dst[in] The destination image.
124 * \param src[in] The source image.
125 * \param dstx0[in] Start destination coordinate.
126 * \param dsty0[in] Start destination coordinate.
127 * \param width[in] Blit width.
128 * \param height[in] Blit height.
129 * \param srcx0[in] Start source coordinate.
130 * \param srcy0[in] Start source coordinate.
131 * \param flush_flag[in] Image blit flush flag.
132 * \return true iff successful.
133 */
134 static bool
135 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
136 __DRIimage *dst, __DRIimage *src,
137 int dstx0, int dsty0, int width, int height,
138 int srcx0, int srcy0, int flush_flag)
139 {
140 __DRIcontext *dri_context;
141 bool use_blit_context = false;
142
143 if (!loader_dri3_have_image_blit(draw))
144 return false;
145
146 dri_context = draw->vtable->get_dri_context(draw);
147
148 if (!dri_context || !draw->vtable->in_current_context(draw)) {
149 dri_context = loader_dri3_blit_context_get(draw);
150 use_blit_context = true;
151 flush_flag |= __BLIT_FLAG_FLUSH;
152 }
153
154 if (dri_context)
155 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
156 width, height, srcx0, srcy0,
157 width, height, flush_flag);
158
159 if (use_blit_context)
160 loader_dri3_blit_context_put();
161
162 return dri_context != NULL;
163 }
164
/* Arm the buffer's shared-memory fence (mark it "not triggered"). */
static inline void
dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xshmfence_reset(buffer->shm_fence);
}
170
/* Trigger the fence locally, marking the buffer as idle/ready. */
static inline void
dri3_fence_set(struct loader_dri3_buffer *buffer)
{
   xshmfence_trigger(buffer->shm_fence);
}
176
/* Ask the X server to trigger the buffer's sync fence. */
static inline void
dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xcb_sync_trigger_fence(c, buffer->sync_fence);
}
182
/* Block until the buffer's fence is triggered.  Flushes the connection
 * first so any pending trigger request reaches the server; when @draw is
 * non-NULL, drains Present events afterwards to keep state current.
 */
static inline void
dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
                 struct loader_dri3_buffer *buffer)
{
   xcb_flush(c);
   xshmfence_await(buffer->shm_fence);
   if (draw)
      dri3_flush_present_events(draw);
}
192
193 static void
194 dri3_update_num_back(struct loader_dri3_drawable *draw)
195 {
196 if (draw->flipping)
197 draw->num_back = 3;
198 else
199 draw->num_back = 2;
200 }
201
/* Record a new swap interval and refresh the back-buffer count, which
 * depends on the current flipping state.
 */
void
loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
{
   draw->swap_interval = interval;
   dri3_update_num_back(draw);
}
208
209 /** dri3_free_render_buffer
210 *
211 * Free everything associated with one render buffer including pixmap, fence
212 * stuff and the driver image
213 */
214 static void
215 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
216 struct loader_dri3_buffer *buffer)
217 {
218 if (buffer->own_pixmap)
219 xcb_free_pixmap(draw->conn, buffer->pixmap);
220 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
221 xshmfence_unmap_shm(buffer->shm_fence);
222 draw->ext->image->destroyImage(buffer->image);
223 if (buffer->linear_buffer)
224 draw->ext->image->destroyImage(buffer->linear_buffer);
225 free(buffer);
226 }
227
228 void
229 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
230 {
231 int i;
232
233 draw->ext->core->destroyDrawable(draw->dri_drawable);
234
235 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
236 if (draw->buffers[i])
237 dri3_free_render_buffer(draw, draw->buffers[i]);
238 }
239
240 if (draw->special_event) {
241 xcb_void_cookie_t cookie =
242 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
243 XCB_PRESENT_EVENT_MASK_NO_EVENT);
244
245 xcb_discard_reply(draw->conn, cookie.sequence);
246 xcb_unregister_for_special_event(draw->conn, draw->special_event);
247 }
248 }
249
250 int
251 loader_dri3_drawable_init(xcb_connection_t *conn,
252 xcb_drawable_t drawable,
253 __DRIscreen *dri_screen,
254 bool is_different_gpu,
255 const __DRIconfig *dri_config,
256 struct loader_dri3_extensions *ext,
257 const struct loader_dri3_vtable *vtable,
258 struct loader_dri3_drawable *draw)
259 {
260 xcb_get_geometry_cookie_t cookie;
261 xcb_get_geometry_reply_t *reply;
262 xcb_generic_error_t *error;
263 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
264 int swap_interval;
265
266 draw->conn = conn;
267 draw->ext = ext;
268 draw->vtable = vtable;
269 draw->drawable = drawable;
270 draw->dri_screen = dri_screen;
271 draw->is_different_gpu = is_different_gpu;
272
273 draw->have_back = 0;
274 draw->have_fake_front = 0;
275 draw->first_init = true;
276
277 draw->cur_blit_source = -1;
278 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
279
280 if (draw->ext->config)
281 draw->ext->config->configQueryi(draw->dri_screen,
282 "vblank_mode", &vblank_mode);
283
284 switch (vblank_mode) {
285 case DRI_CONF_VBLANK_NEVER:
286 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
287 swap_interval = 0;
288 break;
289 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
290 case DRI_CONF_VBLANK_ALWAYS_SYNC:
291 default:
292 swap_interval = 1;
293 break;
294 }
295 draw->swap_interval = swap_interval;
296
297 dri3_update_num_back(draw);
298
299 /* Create a new drawable */
300 draw->dri_drawable =
301 draw->ext->image_driver->createNewDrawable(dri_screen,
302 dri_config,
303 draw);
304
305 if (!draw->dri_drawable)
306 return 1;
307
308 cookie = xcb_get_geometry(draw->conn, draw->drawable);
309 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
310 if (reply == NULL || error != NULL) {
311 draw->ext->core->destroyDrawable(draw->dri_drawable);
312 return 1;
313 }
314
315 draw->width = reply->width;
316 draw->height = reply->height;
317 draw->depth = reply->depth;
318 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
319 free(reply);
320
321 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
322 if (draw->ext->core->base.version >= 2) {
323 (void )draw->ext->core->getConfigAttrib(dri_config,
324 __DRI_ATTRIB_SWAP_METHOD,
325 &draw->swap_method);
326 }
327
328 /*
329 * Make sure server has the same swap interval we do for the new
330 * drawable.
331 */
332 loader_dri3_set_swap_interval(draw, swap_interval);
333
334 return 0;
335 }
336
/*
 * Process one Present event
 *
 * Updates drawable state from CONFIGURE_NOTIFY (size change),
 * COMPLETE_NOTIFY (swap / MSC bookkeeping) and IDLE_NOTIFY (buffer busy
 * tracking) events, then frees the event.  Consumes @ge.
 */
static void
dri3_handle_present_event(struct loader_dri3_drawable *draw,
                          xcb_present_generic_event_t *ge)
{
   switch (ge->evtype) {
   case XCB_PRESENT_CONFIGURE_NOTIFY: {
      xcb_present_configure_notify_event_t *ce = (void *) ge;

      /* Record the new size and invalidate the DRI drawable so the
       * driver re-queries its buffers.
       */
      draw->width = ce->width;
      draw->height = ce->height;
      draw->vtable->set_drawable_size(draw, draw->width, draw->height);
      draw->ext->flush->invalidate(draw->dri_drawable);
      break;
   }
   case XCB_PRESENT_COMPLETE_NOTIFY: {
      xcb_present_complete_notify_event_t *ce = (void *) ge;

      /* Compute the processed SBC number from the received 32-bit serial number
       * merged with the upper 32-bits of the sent 64-bit serial number while
       * checking for wrap.
       */
      if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
         draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
         if (draw->recv_sbc > draw->send_sbc)
            draw->recv_sbc -= 0x100000000;
         switch (ce->mode) {
         case XCB_PRESENT_COMPLETE_MODE_FLIP:
            draw->flipping = true;
            break;
         case XCB_PRESENT_COMPLETE_MODE_COPY:
            draw->flipping = false;
            break;
         }
         /* Flip state may have changed; adjust the back-buffer count. */
         dri3_update_num_back(draw);

         if (draw->vtable->show_fps)
            draw->vtable->show_fps(draw, ce->ust);

         draw->ust = ce->ust;
         draw->msc = ce->msc;
      } else {
         /* Completion of an xcb_present_notify_msc() request. */
         draw->recv_msc_serial = ce->serial;
         draw->notify_ust = ce->ust;
         draw->notify_msc = ce->msc;
      }
      break;
   }
   case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
      xcb_present_idle_notify_event_t *ie = (void *) ge;
      int b;

      /* Mark the matching buffer idle; free back buffers in slots beyond
       * the currently required num_back.
       */
      for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
         struct loader_dri3_buffer *buf = draw->buffers[b];

         if (buf && buf->pixmap == ie->pixmap) {
            buf->busy = 0;
            if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
               dri3_free_render_buffer(draw, buf);
               draw->buffers[b] = NULL;
            }
            break;
         }
      }
      break;
   }
   }
   free(ge);
}
408
409 static bool
410 dri3_wait_for_event(struct loader_dri3_drawable *draw)
411 {
412 xcb_generic_event_t *ev;
413 xcb_present_generic_event_t *ge;
414
415 xcb_flush(draw->conn);
416 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
417 if (!ev)
418 return false;
419 ge = (void *) ev;
420 dri3_handle_present_event(draw, ge);
421 return true;
422 }
423
424 /** loader_dri3_wait_for_msc
425 *
426 * Get the X server to send an event when the target msc/divisor/remainder is
427 * reached.
428 */
429 bool
430 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
431 int64_t target_msc,
432 int64_t divisor, int64_t remainder,
433 int64_t *ust, int64_t *msc, int64_t *sbc)
434 {
435 uint32_t msc_serial;
436
437 msc_serial = ++draw->send_msc_serial;
438 xcb_present_notify_msc(draw->conn,
439 draw->drawable,
440 msc_serial,
441 target_msc,
442 divisor,
443 remainder);
444
445 xcb_flush(draw->conn);
446
447 /* Wait for the event */
448 if (draw->special_event) {
449 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
450 if (!dri3_wait_for_event(draw))
451 return false;
452 }
453 }
454
455 *ust = draw->notify_ust;
456 *msc = draw->notify_msc;
457 *sbc = draw->recv_sbc;
458
459 return true;
460 }
461
462 /** loader_dri3_wait_for_sbc
463 *
464 * Wait for the completed swap buffer count to reach the specified
465 * target. Presumably the application knows that this will be reached with
466 * outstanding complete events, or we're going to be here awhile.
467 */
468 int
469 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
470 int64_t target_sbc, int64_t *ust,
471 int64_t *msc, int64_t *sbc)
472 {
473 /* From the GLX_OML_sync_control spec:
474 *
475 * "If <target_sbc> = 0, the function will block until all previous
476 * swaps requested with glXSwapBuffersMscOML for that window have
477 * completed."
478 */
479 if (!target_sbc)
480 target_sbc = draw->send_sbc;
481
482 while (draw->recv_sbc < target_sbc) {
483 if (!dri3_wait_for_event(draw))
484 return 0;
485 }
486
487 *ust = draw->ust;
488 *msc = draw->msc;
489 *sbc = draw->recv_sbc;
490 return 1;
491 }
492
493 /** loader_dri3_find_back
494 *
495 * Find an idle back buffer. If there isn't one, then
496 * wait for a present idle notify event from the X server
497 */
498 static int
499 dri3_find_back(struct loader_dri3_drawable *draw)
500 {
501 int b;
502 xcb_generic_event_t *ev;
503 xcb_present_generic_event_t *ge;
504 int num_to_consider = draw->num_back;
505
506 /* Increase the likelyhood of reusing current buffer */
507 dri3_flush_present_events(draw);
508
509 /* Check whether we need to reuse the current back buffer as new back.
510 * In that case, wait until it's not busy anymore.
511 */
512 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
513 num_to_consider = 1;
514 draw->cur_blit_source = -1;
515 }
516
517 for (;;) {
518 for (b = 0; b < num_to_consider; b++) {
519 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
520 struct loader_dri3_buffer *buffer = draw->buffers[id];
521
522 if (!buffer || !buffer->busy) {
523 draw->cur_back = id;
524 return id;
525 }
526 }
527 xcb_flush(draw->conn);
528 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
529 if (!ev)
530 return -1;
531 ge = (void *) ev;
532 dri3_handle_present_event(draw, ge);
533 }
534 }
535
536 static xcb_gcontext_t
537 dri3_drawable_gc(struct loader_dri3_drawable *draw)
538 {
539 if (!draw->gc) {
540 uint32_t v = 0;
541 xcb_create_gc(draw->conn,
542 (draw->gc = xcb_generate_id(draw->conn)),
543 draw->drawable,
544 XCB_GC_GRAPHICS_EXPOSURES,
545 &v);
546 }
547 return draw->gc;
548 }
549
550
/* Accessor for the current back buffer slot. */
static struct loader_dri3_buffer *
dri3_back_buffer(struct loader_dri3_drawable *draw)
{
   return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
}
556
/* Accessor for the fake front buffer slot (may be NULL if none exists). */
static struct loader_dri3_buffer *
dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
{
   return draw->buffers[LOADER_DRI3_FRONT_ID];
}
562
563 static void
564 dri3_copy_area(xcb_connection_t *c,
565 xcb_drawable_t src_drawable,
566 xcb_drawable_t dst_drawable,
567 xcb_gcontext_t gc,
568 int16_t src_x,
569 int16_t src_y,
570 int16_t dst_x,
571 int16_t dst_y,
572 uint16_t width,
573 uint16_t height)
574 {
575 xcb_void_cookie_t cookie;
576
577 cookie = xcb_copy_area_checked(c,
578 src_drawable,
579 dst_drawable,
580 gc,
581 src_x,
582 src_y,
583 dst_x,
584 dst_y,
585 width,
586 height);
587 xcb_discard_reply(c, cookie.sequence);
588 }
589
/**
 * Asks the driver to flush any queued work necessary for serializing with the
 * X command stream, and optionally the slightly more strict requirement of
 * glFlush() equivalence (which would require flushing even if nothing had
 * been drawn to a window system framebuffer, for example).
 */
void
loader_dri3_flush(struct loader_dri3_drawable *draw,
                  unsigned flags,
                  enum __DRI2throttleReason throttle_reason)
{
   /* The vtable may hand back a NULL context (e.g. none current); in that
    * case there is nothing to flush.
    */
   __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);

   if (dri_context) {
      draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
                                         flags, throttle_reason);
   }
}
609
/** loader_dri3_copy_sub_buffer
 *
 * Copy a sub-rectangle of the current back buffer to the real front,
 * keeping the fake front (if any) in sync.  @x/@y arrive in GL
 * coordinates (origin bottom-left); the y-flip below converts to X
 * coordinates.
 */
void
loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
                            int x, int y,
                            int width, int height,
                            bool flush)
{
   struct loader_dri3_buffer *back;
   unsigned flags = __DRI2_FLUSH_DRAWABLE;

   /* Check we have the right attachments */
   if (!draw->have_back || draw->is_pixmap)
      return;

   if (flush)
      flags |= __DRI2_FLUSH_CONTEXT;
   loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);

   back = dri3_find_back_alloc(draw);
   if (!back)
      return;

   /* Convert from GL (bottom-left origin) to X (top-left origin). */
   y = draw->height - y - height;

   if (draw->is_different_gpu) {
      /* Update the linear buffer part of the back buffer
       * for the dri3_copy_area operation
       */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   /* Fence protocol: reset before queueing the copy, ask the server to
    * trigger after it, and await at the end before the buffer is reused.
    */
   loader_dri3_swapbuffer_barrier(draw);
   dri3_fence_reset(draw->conn, back);
   dri3_copy_area(draw->conn,
                  back->pixmap,
                  draw->drawable,
                  dri3_drawable_gc(draw),
                  x, y, x, y, width, height);
   dri3_fence_trigger(draw->conn, back);
   /* Refresh the fake front (if present) after we just damaged the real
    * front.
    */
   if (draw->have_fake_front &&
       !loader_dri3_blit_image(draw,
                               dri3_fake_front_buffer(draw)->image,
                               back->image,
                               x, y, width, height,
                               x, y, __BLIT_FLAG_FLUSH) &&
       !draw->is_different_gpu) {
      /* Local blit unavailable: fall back to a server-side copy guarded
       * by the fake front's own fence.
       */
      dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
      dri3_copy_area(draw->conn,
                     back->pixmap,
                     dri3_fake_front_buffer(draw)->pixmap,
                     dri3_drawable_gc(draw),
                     x, y, x, y, width, height);
      dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
      dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
   }
   dri3_fence_await(draw->conn, draw, back);
}
673
/* Server-side copy of the whole drawable area from @src to @dest,
 * synchronized through the fake front buffer's fence (reset → copy →
 * trigger → await).
 */
void
loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
                          xcb_drawable_t dest,
                          xcb_drawable_t src)
{
   loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);

   dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
   dri3_copy_area(draw->conn,
                  src, dest,
                  dri3_drawable_gc(draw),
                  0, 0, 0, 0, draw->width, draw->height);
   dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
   dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
}
689
690 void
691 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
692 {
693 struct loader_dri3_buffer *front;
694
695 if (draw == NULL || !draw->have_fake_front)
696 return;
697
698 front = dri3_fake_front_buffer(draw);
699
700 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
701
702 /* In the psc->is_different_gpu case, the linear buffer has been updated,
703 * but not yet the tiled buffer.
704 * Copy back to the tiled buffer we use for rendering.
705 * Note that we don't need flushing.
706 */
707 if (draw->is_different_gpu)
708 (void) loader_dri3_blit_image(draw,
709 front->image,
710 front->linear_buffer,
711 0, 0, front->width, front->height,
712 0, 0, 0);
713 }
714
715 void
716 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
717 {
718 struct loader_dri3_buffer *front;
719
720 if (draw == NULL || !draw->have_fake_front)
721 return;
722
723 front = dri3_fake_front_buffer(draw);
724
725 /* In the psc->is_different_gpu case, we update the linear_buffer
726 * before updating the real front.
727 */
728 if (draw->is_different_gpu)
729 (void) loader_dri3_blit_image(draw,
730 front->linear_buffer,
731 front->image,
732 0, 0, front->width, front->height,
733 0, 0, __BLIT_FLAG_FLUSH);
734 loader_dri3_swapbuffer_barrier(draw);
735 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
736 }
737
738 /** dri3_flush_present_events
739 *
740 * Process any present events that have been received from the X server
741 */
742 static void
743 dri3_flush_present_events(struct loader_dri3_drawable *draw)
744 {
745 /* Check to see if any configuration changes have occurred
746 * since we were last invoked
747 */
748 if (draw->special_event) {
749 xcb_generic_event_t *ev;
750
751 while ((ev = xcb_poll_for_special_event(draw->conn,
752 draw->special_event)) != NULL) {
753 xcb_present_generic_event_t *ge = (void *) ev;
754 dri3_handle_present_event(draw, ge);
755 }
756 }
757 }
758
/** loader_dri3_swap_buffers_msc
 *
 * Make the current back buffer visible using the present extension.
 *
 * Flushes rendering, exchanges back and fake front, presents the pixmap
 * with the requested msc/divisor/remainder, and finally invalidates the
 * DRI drawable so the driver re-queries buffers.
 *
 * \return the swap buffer count of this swap, or 0 if nothing was
 *         presented.
 */
int64_t
loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
                             int64_t target_msc, int64_t divisor,
                             int64_t remainder, unsigned flush_flags,
                             bool force_copy)
{
   struct loader_dri3_buffer *back;
   int64_t ret = 0;
   uint32_t options = XCB_PRESENT_OPTION_NONE;

   draw->vtable->flush_drawable(draw, flush_flags);

   back = dri3_find_back_alloc(draw);

   if (draw->is_different_gpu && back) {
      /* Update the linear buffer before presenting the pixmap */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   /* If we need to preload the new back buffer, remember the source.
    * The force_copy parameter is used by EGL to attempt to preserve
    * the back buffer across a call to this function.
    */
   if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
      draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);

   /* Exchange the back and fake front. Even though the server knows about these
    * buffers, it has no notion of back and fake front.
    */
   if (back && draw->have_fake_front) {
      struct loader_dri3_buffer *tmp;

      tmp = dri3_fake_front_buffer(draw);
      draw->buffers[LOADER_DRI3_FRONT_ID] = back;
      draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;

      if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
         draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
   }

   dri3_flush_present_events(draw);

   if (back && !draw->is_pixmap) {
      dri3_fence_reset(draw->conn, back);

      /* Compute when we want the frame shown by taking the last known
       * successful MSC and adding in a swap interval for each outstanding swap
       * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
       * semantic"
       */
      ++draw->send_sbc;
      if (target_msc == 0 && divisor == 0 && remainder == 0)
         target_msc = draw->msc + draw->swap_interval *
                      (draw->send_sbc - draw->recv_sbc);
      else if (divisor == 0 && remainder > 0) {
         /* From the GLX_OML_sync_control spec:
          *     "If <divisor> = 0, the swap will occur when MSC becomes
          *      greater than or equal to <target_msc>."
          *
          * Note that there's no mention of the remainder. The Present
          * extension throws BadValue for remainder != 0 with divisor == 0, so
          * just drop the passed in value.
          */
         remainder = 0;
      }

      /* From the GLX_EXT_swap_control spec
       * and the EGL 1.4 spec (page 53):
       *
       *     "If <interval> is set to a value of 0, buffer swaps are not
       *      synchronized to a video frame."
       *
       * Implementation note: It is possible to enable triple buffering
       * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
       * the default.
       */
      if (draw->swap_interval == 0)
         options |= XCB_PRESENT_OPTION_ASYNC;

      /* If we need to populate the new back, but need to reuse the back
       * buffer slot due to lack of local blit capabilities, make sure
       * the server doesn't flip and we deadlock.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
         options |= XCB_PRESENT_OPTION_COPY;

      back->busy = 1;
      back->last_swap = draw->send_sbc;
      xcb_present_pixmap(draw->conn,
                         draw->drawable,
                         back->pixmap,
                         (uint32_t) draw->send_sbc,
                         0,                                    /* valid */
                         0,                                    /* update */
                         0,                                    /* x_off */
                         0,                                    /* y_off */
                         None,                                 /* target_crtc */
                         None,
                         back->sync_fence,
                         options,
                         target_msc,
                         divisor,
                         remainder, 0, NULL);
      ret = (int64_t) draw->send_sbc;

      /* Schedule a server-side back-preserving blit if necessary.
       * This happens iff all conditions below are satisfied:
       * a) We have a fake front,
       * b) We need to preserve the back buffer,
       * c) We don't have local blit capabilities.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
          draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
         struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
         struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];

         dri3_fence_reset(draw->conn, new_back);
         dri3_copy_area(draw->conn, src->pixmap,
                        new_back->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0, draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_back);
         new_back->last_swap = src->last_swap;
      }

      xcb_flush(draw->conn);
      if (draw->stamp)
         ++(*draw->stamp);
   }

   /* Tell the driver its cached buffer info is stale. */
   draw->ext->flush->invalidate(draw->dri_drawable);

   return ret;
}
901
902 int
903 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
904 {
905 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
906
907 if (!back || back->last_swap == 0)
908 return 0;
909
910 return draw->send_sbc - back->last_swap + 1;
911 }
912
913 /** loader_dri3_open
914 *
915 * Wrapper around xcb_dri3_open
916 */
917 int
918 loader_dri3_open(xcb_connection_t *conn,
919 xcb_window_t root,
920 uint32_t provider)
921 {
922 xcb_dri3_open_cookie_t cookie;
923 xcb_dri3_open_reply_t *reply;
924 int fd;
925
926 cookie = xcb_dri3_open(conn,
927 root,
928 provider);
929
930 reply = xcb_dri3_open_reply(conn, cookie, NULL);
931 if (!reply)
932 return -1;
933
934 if (reply->nfd != 1) {
935 free(reply);
936 return -1;
937 }
938
939 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
940 free(reply);
941 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
942
943 return fd;
944 }
945
946 static uint32_t
947 dri3_cpp_for_format(uint32_t format) {
948 switch (format) {
949 case __DRI_IMAGE_FORMAT_R8:
950 return 1;
951 case __DRI_IMAGE_FORMAT_RGB565:
952 case __DRI_IMAGE_FORMAT_GR88:
953 return 2;
954 case __DRI_IMAGE_FORMAT_XRGB8888:
955 case __DRI_IMAGE_FORMAT_ARGB8888:
956 case __DRI_IMAGE_FORMAT_ABGR8888:
957 case __DRI_IMAGE_FORMAT_XBGR8888:
958 case __DRI_IMAGE_FORMAT_XRGB2101010:
959 case __DRI_IMAGE_FORMAT_ARGB2101010:
960 case __DRI_IMAGE_FORMAT_SARGB8:
961 return 4;
962 case __DRI_IMAGE_FORMAT_NONE:
963 default:
964 return 0;
965 }
966 }
967
968 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
969 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
970 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
971 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
972 */
973 static int
974 image_format_to_fourcc(int format)
975 {
976
977 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
978 switch (format) {
979 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
980 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
981 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
982 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
983 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
984 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
985 }
986 return 0;
987 }
988
/** loader_dri3_alloc_render_buffer
 *
 * Use the driver createImage function to construct a __DRIimage, then
 * get a file descriptor for that and create an X pixmap from that
 *
 * Allocate an xshmfence for synchronization
 *
 * On the different-GPU (PRIME) path a second, linear image is allocated
 * for sharing with the X server, while rendering keeps using the tiled
 * image.  Cleanup on failure unwinds through the goto chain at the end.
 *
 * \return the new buffer, or NULL on failure (all partial allocations
 *         are released).
 */
static struct loader_dri3_buffer *
dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
                         int width, int height, int depth)
{
   struct loader_dri3_buffer *buffer;
   __DRIimage *pixmap_buffer;
   xcb_pixmap_t pixmap;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int buffer_fd, fence_fd;
   int stride;

   /* Create an xshmfence object and
    * prepare to send that to the X server
    */

   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      return NULL;

   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL)
      goto no_shm_fence;

   /* Allocate the image from the driver
    */
   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   buffer->cpp = dri3_cpp_for_format(format);
   if (!buffer->cpp)
      goto no_image;

   if (!draw->is_different_gpu) {
      /* Single-GPU: one shareable, scanout-capable image serves both
       * rendering and the X pixmap.
       */
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    __DRI_IMAGE_USE_SHARE |
                                                    __DRI_IMAGE_USE_SCANOUT |
                                                    __DRI_IMAGE_USE_BACKBUFFER,
                                                    buffer);
      pixmap_buffer = buffer->image;

      if (!buffer->image)
         goto no_image;
   } else {
      /* PRIME: render into a tiled image, share a separate linear image
       * with the server.
       */
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    0,
                                                    buffer);

      if (!buffer->image)
         goto no_image;

      buffer->linear_buffer =
         draw->ext->image->createImage(draw->dri_screen,
                                       width, height, format,
                                       __DRI_IMAGE_USE_SHARE |
                                       __DRI_IMAGE_USE_LINEAR |
                                       __DRI_IMAGE_USE_BACKBUFFER,
                                       buffer);
      pixmap_buffer = buffer->linear_buffer;

      if (!buffer->linear_buffer)
         goto no_linear_buffer;
   }

   /* X wants the stride, so ask the image for it
    */
   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
                                     &stride))
      goto no_buffer_attrib;

   buffer->pitch = stride;

   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
                                     &buffer_fd))
      goto no_buffer_attrib;

   /* NOTE(review): buffer->size is still zero here (buffer was calloc'ed
    * and size is never assigned before this call) — verify the server
    * tolerates/ignores the size argument as intended.
    */
   xcb_dri3_pixmap_from_buffer(draw->conn,
                               (pixmap = xcb_generate_id(draw->conn)),
                               draw->drawable,
                               buffer->size,
                               width, height, buffer->pitch,
                               depth, buffer->cpp * 8,
                               buffer_fd);

   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   buffer->pixmap = pixmap;
   buffer->own_pixmap = true;
   buffer->sync_fence = sync_fence;
   buffer->shm_fence = shm_fence;
   buffer->width = width;
   buffer->height = height;

   /* Mark the buffer as idle
    */
   dri3_fence_set(buffer);

   return buffer;

no_buffer_attrib:
   draw->ext->image->destroyImage(pixmap_buffer);
no_linear_buffer:
   if (draw->is_different_gpu)
      draw->ext->image->destroyImage(buffer->image);
no_image:
   free(buffer);
no_buffer:
   xshmfence_unmap_shm(shm_fence);
no_shm_fence:
   close(fence_fd);
   return NULL;
}
1117
1118 /** loader_dri3_update_drawable
1119 *
1120 * Called the first time we use the drawable and then
1121 * after we receive present configure notify events to
1122 * track the geometry of the drawable
1123 */
1124 static int
1125 dri3_update_drawable(__DRIdrawable *driDrawable,
1126 struct loader_dri3_drawable *draw)
1127 {
1128 if (draw->first_init) {
1129 xcb_get_geometry_cookie_t geom_cookie;
1130 xcb_get_geometry_reply_t *geom_reply;
1131 xcb_void_cookie_t cookie;
1132 xcb_generic_error_t *error;
1133 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1134 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1135
1136 draw->first_init = false;
1137
1138 /* Try to select for input on the window.
1139 *
1140 * If the drawable is a window, this will get our events
1141 * delivered.
1142 *
1143 * Otherwise, we'll get a BadWindow error back from this request which
1144 * will let us know that the drawable is a pixmap instead.
1145 */
1146
1147 draw->eid = xcb_generate_id(draw->conn);
1148 cookie =
1149 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1150 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1151 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1152 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1153
1154 present_capabilities_cookie =
1155 xcb_present_query_capabilities(draw->conn, draw->drawable);
1156
1157 /* Create an XCB event queue to hold present events outside of the usual
1158 * application event queue
1159 */
1160 draw->special_event = xcb_register_for_special_xge(draw->conn,
1161 &xcb_present_id,
1162 draw->eid,
1163 draw->stamp);
1164 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1165
1166 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1167
1168 if (!geom_reply)
1169 return false;
1170
1171 draw->width = geom_reply->width;
1172 draw->height = geom_reply->height;
1173 draw->depth = geom_reply->depth;
1174 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1175
1176 free(geom_reply);
1177
1178 draw->is_pixmap = false;
1179
1180 /* Check to see if our select input call failed. If it failed with a
1181 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1182 * special event queue created above and mark the drawable as a pixmap
1183 */
1184
1185 error = xcb_request_check(draw->conn, cookie);
1186
1187 present_capabilities_reply =
1188 xcb_present_query_capabilities_reply(draw->conn,
1189 present_capabilities_cookie,
1190 NULL);
1191
1192 if (present_capabilities_reply) {
1193 draw->present_capabilities = present_capabilities_reply->capabilities;
1194 free(present_capabilities_reply);
1195 } else
1196 draw->present_capabilities = 0;
1197
1198 if (error) {
1199 if (error->error_code != BadWindow) {
1200 free(error);
1201 return false;
1202 }
1203 draw->is_pixmap = true;
1204 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1205 draw->special_event = NULL;
1206 }
1207 }
1208 dri3_flush_present_events(draw);
1209 return true;
1210 }
1211
1212 __DRIimage *
1213 loader_dri3_create_image(xcb_connection_t *c,
1214 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1215 unsigned int format,
1216 __DRIscreen *dri_screen,
1217 const __DRIimageExtension *image,
1218 void *loaderPrivate)
1219 {
1220 int *fds;
1221 __DRIimage *image_planar, *ret;
1222 int stride, offset;
1223
1224 /* Get an FD for the pixmap object
1225 */
1226 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1227
1228 stride = bp_reply->stride;
1229 offset = 0;
1230
1231 /* createImageFromFds creates a wrapper __DRIimage structure which
1232 * can deal with multiple planes for things like Yuv images. So, once
1233 * we've gotten the planar wrapper, pull the single plane out of it and
1234 * discard the wrapper.
1235 */
1236 image_planar = image->createImageFromFds(dri_screen,
1237 bp_reply->width,
1238 bp_reply->height,
1239 image_format_to_fourcc(format),
1240 fds, 1,
1241 &stride, &offset, loaderPrivate);
1242 close(fds[0]);
1243 if (!image_planar)
1244 return NULL;
1245
1246 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1247
1248 image->destroyImage(image_planar);
1249
1250 return ret;
1251 }
1252
/** dri3_get_pixmap_buffer
 *
 * Get the DRM object for a pixmap from the X server and
 * wrap that with a __DRIimage structure using createImageFromFds
 *
 * The result is cached in draw->buffers[buf_id]; subsequent calls for the
 * same buffer type return the cached buffer. Returns NULL on failure.
 */
static struct loader_dri3_buffer *
dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
                       enum loader_dri3_buffer_type buffer_type,
                       struct loader_dri3_drawable *draw)
{
   int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
   struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
   xcb_drawable_t pixmap;
   xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
   xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int fence_fd;

   /* Already wrapped this pixmap once; reuse the cached buffer. */
   if (buffer)
      return buffer;

   pixmap = draw->drawable;

   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   /* Create a shared-memory fence so client and server can signal buffer
    * idleness to each other without extra round trips.
    */
   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      goto no_fence;
   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL) {
      close (fence_fd);
      goto no_fence;
   }

   /* Hand the fence fd to the server; the fd is consumed by this request,
    * so it must not be closed here afterwards.
    */
   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   /* Ask the server for the DMA-BUF backing the pixmap. */
   bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
   bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
   if (!bp_reply)
      goto no_image;

   buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
                                            draw->dri_screen, draw->ext->image,
                                            buffer);
   if (!buffer->image)
      goto no_image;

   buffer->pixmap = pixmap;
   buffer->own_pixmap = false;   /* pixmap belongs to the application */
   buffer->width = bp_reply->width;
   buffer->height = bp_reply->height;
   buffer->shm_fence = shm_fence;
   buffer->sync_fence = sync_fence;

   draw->buffers[buf_id] = buffer;

   free(bp_reply);

   return buffer;

no_image:
   /* free(NULL) is a no-op when the reply itself was what failed. */
   free(bp_reply);
   xcb_sync_destroy_fence(draw->conn, sync_fence);
   xshmfence_unmap_shm(shm_fence);
no_fence:
   free(buffer);
no_buffer:
   return NULL;
}
1329
/** dri3_get_buffer
 *
 * Find a front or back buffer, allocating new ones as necessary
 *
 * Reallocates the buffer when its cached size no longer matches the
 * drawable, preserving the previous contents via a copy or blit.
 * Returns NULL if no back slot is available or allocation fails.
 */
static struct loader_dri3_buffer *
dri3_get_buffer(__DRIdrawable *driDrawable,
                unsigned int format,
                enum loader_dri3_buffer_type buffer_type,
                struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *buffer;
   int buf_id;

   if (buffer_type == loader_dri3_buffer_back) {
      /* Remember the format so a back buffer can be recreated later
       * (see dri3_find_back_alloc).
       */
      draw->back_format = format;

      buf_id = dri3_find_back(draw);

      if (buf_id < 0)
         return NULL;
   } else {
      buf_id = LOADER_DRI3_FRONT_ID;
   }

   buffer = draw->buffers[buf_id];

   /* Allocate a new buffer if there isn't an old one, or if that
    * old one is the wrong size
    */
   if (!buffer || buffer->width != draw->width ||
       buffer->height != draw->height) {
      struct loader_dri3_buffer *new_buffer;

      /* Allocate the new buffers
       */
      new_buffer = dri3_alloc_render_buffer(draw,
                                            format,
                                            draw->width,
                                            draw->height,
                                            draw->depth);
      if (!new_buffer)
         return NULL;

      /* When resizing, copy the contents of the old buffer, waiting for that
       * copy to complete using our fences before proceeding
       */
      switch (buffer_type) {
      case loader_dri3_buffer_back:
         if (buffer) {
            if (!buffer->linear_buffer) {
               /* Same-GPU case: server-side copy between the pixmaps,
                * bracketed by our fences.
                */
               dri3_fence_reset(draw->conn, new_buffer);
               dri3_fence_await(draw->conn, draw, buffer);
               dri3_copy_area(draw->conn,
                              buffer->pixmap,
                              new_buffer->pixmap,
                              dri3_drawable_gc(draw),
                              0, 0, 0, 0,
                              draw->width, draw->height);
               dri3_fence_trigger(draw->conn, new_buffer);
            } else if (draw->vtable->in_current_context(draw)) {
               /* Different-GPU case: blit on the rendering GPU; only
                * possible when our context is current.
                */
               (void) loader_dri3_blit_image(draw,
                                             new_buffer->image,
                                             buffer->image,
                                             0, 0, draw->width, draw->height,
                                             0, 0, 0);
            }
            dri3_free_render_buffer(draw, buffer);
         }
         break;
      case loader_dri3_buffer_front:
         /* Make sure all pending swaps have landed before copying from
          * the real front.
          */
         loader_dri3_swapbuffer_barrier(draw);
         dri3_fence_reset(draw->conn, new_buffer);
         dri3_copy_area(draw->conn,
                        draw->drawable,
                        new_buffer->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0,
                        draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_buffer);

         if (new_buffer->linear_buffer &&
             draw->vtable->in_current_context(draw)) {
            /* Prime: bring the linear copy into the tiled image on the
             * rendering GPU once the server-side copy is done.
             */
            dri3_fence_await(draw->conn, draw, new_buffer);
            (void) loader_dri3_blit_image(draw,
                                          new_buffer->image,
                                          new_buffer->linear_buffer,
                                          0, 0, draw->width, draw->height,
                                          0, 0, 0);
         }
         break;
      }
      buffer = new_buffer;
      draw->buffers[buf_id] = buffer;
   }
   dri3_fence_await(draw->conn, draw, buffer);

   /*
    * Do we need to preserve the content of a previous buffer?
    *
    * Note that this blit is needed only to avoid a wait for a buffer that
    * is currently in the flip chain or being scanned out from. That's really
    * a tradeoff. If we're ok with the wait we can reduce the number of back
    * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
    * but in the latter case we must disallow page-flipping.
    */
   if (buffer_type == loader_dri3_buffer_back &&
       draw->cur_blit_source != -1 &&
       draw->buffers[draw->cur_blit_source] &&
       buffer != draw->buffers[draw->cur_blit_source]) {

      struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];

      /* Avoid flushing here. Will probably do good for tiling hardware. */
      (void) loader_dri3_blit_image(draw,
                                    buffer->image,
                                    source->image,
                                    0, 0, draw->width, draw->height,
                                    0, 0, 0);
      buffer->last_swap = source->last_swap;
      draw->cur_blit_source = -1;
   }
   /* Return the requested buffer */
   return buffer;
}
1454
1455 /** dri3_free_buffers
1456 *
1457 * Free the front bufffer or all of the back buffers. Used
1458 * when the application changes which buffers it needs
1459 */
1460 static void
1461 dri3_free_buffers(__DRIdrawable *driDrawable,
1462 enum loader_dri3_buffer_type buffer_type,
1463 struct loader_dri3_drawable *draw)
1464 {
1465 struct loader_dri3_buffer *buffer;
1466 int first_id;
1467 int n_id;
1468 int buf_id;
1469
1470 switch (buffer_type) {
1471 case loader_dri3_buffer_back:
1472 first_id = LOADER_DRI3_BACK_ID(0);
1473 n_id = LOADER_DRI3_MAX_BACK;
1474 break;
1475 case loader_dri3_buffer_front:
1476 first_id = LOADER_DRI3_FRONT_ID;
1477 n_id = 1;
1478 }
1479
1480 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1481 buffer = draw->buffers[buf_id];
1482 if (buffer) {
1483 dri3_free_render_buffer(draw, buffer);
1484 draw->buffers[buf_id] = NULL;
1485 }
1486 }
1487 }
1488
/** loader_dri3_get_buffers
 *
 * The published buffer allocation API.
 * Returns all of the necessary buffers, allocating
 * as needed.
 *
 * Fills in \p buffers with the front and/or back images requested by
 * \p buffer_mask, freeing buffers that are no longer requested.
 * Returns true on success, false on failure.
 */
int
loader_dri3_get_buffers(__DRIdrawable *driDrawable,
                        unsigned int format,
                        uint32_t *stamp,
                        void *loaderPrivate,
                        uint32_t buffer_mask,
                        struct __DRIimageList *buffers)
{
   struct loader_dri3_drawable *draw = loaderPrivate;
   struct loader_dri3_buffer *front, *back;

   buffers->image_mask = 0;
   buffers->front = NULL;
   buffers->back = NULL;

   front = NULL;
   back = NULL;

   /* Refresh drawable geometry and event registration first. */
   if (!dri3_update_drawable(driDrawable, draw))
      return false;

   /* pixmaps always have front buffers.
    * Exchange swaps also mandate fake front buffers.
    */
   if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
      buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;

   if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
      /* All pixmaps are owned by the server gpu.
       * When we use a different gpu, we can't use the pixmap
       * as buffer since it is potentially tiled a way
       * our device can't understand. In this case, use
       * a fake front buffer. Hopefully the pixmap
       * content will get synced with the fake front
       * buffer.
       */
      if (draw->is_pixmap && !draw->is_different_gpu)
         front = dri3_get_pixmap_buffer(driDrawable,
                                        format,
                                        loader_dri3_buffer_front,
                                        draw);
      else
         front = dri3_get_buffer(driDrawable,
                                 format,
                                 loader_dri3_buffer_front,
                                 draw);

      if (!front)
         return false;
   } else {
      /* Front no longer requested: drop any cached front buffer. */
      dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
      draw->have_fake_front = 0;
   }

   if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
      back = dri3_get_buffer(driDrawable,
                             format,
                             loader_dri3_buffer_back,
                             draw);
      if (!back)
         return false;
      draw->have_back = 1;
   } else {
      /* Back no longer requested: drop all cached back buffers. */
      dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
      draw->have_back = 0;
   }

   if (front) {
      buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
      buffers->front = front->image;
      /* A real (server-owned) front only exists for same-GPU pixmaps;
       * everything else is a fake front.
       */
      draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
   }

   if (back) {
      buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
      buffers->back = back->image;
   }

   draw->stamp = stamp;

   return true;
}
1577
1578 /** loader_dri3_update_drawable_geometry
1579 *
1580 * Get the current drawable geometry.
1581 */
1582 void
1583 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1584 {
1585 xcb_get_geometry_cookie_t geom_cookie;
1586 xcb_get_geometry_reply_t *geom_reply;
1587
1588 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1589
1590 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1591
1592 if (geom_reply) {
1593 draw->width = geom_reply->width;
1594 draw->height = geom_reply->height;
1595 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1596 draw->ext->flush->invalidate(draw->dri_drawable);
1597
1598 free(geom_reply);
1599 }
1600 }
1601
1602
/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t ust;
   int64_t msc;
   int64_t sbc;

   /* Waiting for swap-buffer-count 0 blocks until every previously
    * submitted swap has completed; the returned counters are unused.
    */
   (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
}
1616
1617 /**
1618 * Perform any cleanup associated with a close screen operation.
1619 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1620 *
1621 * This function destroys the screen's cached swap context if any.
1622 */
1623 void
1624 loader_dri3_close_screen(__DRIscreen *dri_screen)
1625 {
1626 mtx_lock(&blit_context.mtx);
1627 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1628 blit_context.core->destroyContext(blit_context.ctx);
1629 blit_context.ctx = NULL;
1630 }
1631 mtx_unlock(&blit_context.mtx);
1632 }
1633
1634 /**
1635 * Find a backbuffer slot - potentially allocating a back buffer
1636 *
1637 * \param draw[in,out] Pointer to the drawable for which to find back.
1638 * \return Pointer to a new back buffer or NULL if allocation failed or was
1639 * not mandated.
1640 *
1641 * Find a potentially new back buffer, and if it's not been allocated yet and
1642 * in addition needs initializing, then try to allocate and initialize it.
1643 */
1644 static struct loader_dri3_buffer *
1645 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1646 {
1647 struct loader_dri3_buffer *back;
1648 int id;
1649
1650 id = dri3_find_back(draw);
1651 back = (id >= 0) ? draw->buffers[id] : NULL;
1652
1653 if (back || (id >= 0 && draw->back_format != __DRI_IMAGE_FORMAT_NONE)) {
1654 if (dri3_update_drawable(draw->dri_drawable, draw)) {
1655 (void) dri3_get_buffer(draw->dri_drawable,
1656 draw->back_format,
1657 loader_dri3_buffer_back,
1658 draw);
1659 back = (id >= 0) ? draw->buffers[id] : NULL;
1660 }
1661 }
1662
1663 return back;
1664 }