e3120f5d250bfd93ae7453d00d909dc05af75233
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include <c11/threads.h>
36 #include "loader_dri3_helper.h"
37
38 /* From xmlpool/options.h, user exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
43
/**
 * A cached blit context.
 *
 * Lazily created by loader_dri3_blit_context_get() and destroyed/recreated
 * whenever a drawable from a different DRI screen needs it.
 */
struct loader_dri3_blit_context {
   mtx_t mtx;                      /* Guards all fields below; held while the
                                    * context is in use by a thread. */
   __DRIcontext *ctx;              /* Cached context, or NULL if none. */
   __DRIscreen *cur_screen;        /* Screen ctx was created for. */
   const __DRIcoreExtension *core; /* Core extension used to destroy ctx. */
};

/* For simplicity we maintain the cache only for a single screen at a time */
static struct loader_dri3_blit_context blit_context = {
   _MTX_INITIALIZER_NP, NULL
};
58
59 static void
60 dri3_flush_present_events(struct loader_dri3_drawable *draw);
61
62 static struct loader_dri3_buffer *
63 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
64
65 /**
66 * Do we have blit functionality in the image blit extension?
67 *
68 * \param draw[in] The drawable intended to blit from / to.
69 * \return true if we have blit functionality. false otherwise.
70 */
71 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
72 {
73 return draw->ext->image->base.version >= 9 &&
74 draw->ext->image->blitImage != NULL;
75 }
76
77 /**
78 * Get and lock (for use with the current thread) a dri context associated
79 * with the drawable's dri screen. The context is intended to be used with
80 * the dri image extension's blitImage method.
81 *
82 * \param draw[in] Pointer to the drawable whose dri screen we want a
83 * dri context for.
84 * \return A dri context or NULL if context creation failed.
85 *
86 * When the caller is done with the context (even if the context returned was
87 * NULL), the caller must call loader_dri3_blit_context_put.
88 */
89 static __DRIcontext *
90 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
91 {
92 mtx_lock(&blit_context.mtx);
93
94 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
95 blit_context.core->destroyContext(blit_context.ctx);
96 blit_context.ctx = NULL;
97 }
98
99 if (!blit_context.ctx) {
100 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
101 NULL, NULL, NULL);
102 blit_context.cur_screen = draw->dri_screen;
103 blit_context.core = draw->ext->core;
104 }
105
106 return blit_context.ctx;
107 }
108
/**
 * Release (for use with other threads) a dri context previously obtained using
 * loader_dri3_blit_context_get.
 *
 * Unlocks the mutex taken by loader_dri3_blit_context_get(); must be called
 * exactly once per _get(), even when _get() returned NULL.
 */
static void
loader_dri3_blit_context_put(void)
{
   mtx_unlock(&blit_context.mtx);
}
118
119 /**
120 * Blit (parts of) the contents of a DRI image to another dri image
121 *
122 * \param draw[in] The drawable which owns the images.
123 * \param dst[in] The destination image.
124 * \param src[in] The source image.
125 * \param dstx0[in] Start destination coordinate.
126 * \param dsty0[in] Start destination coordinate.
127 * \param width[in] Blit width.
128 * \param height[in] Blit height.
129 * \param srcx0[in] Start source coordinate.
130 * \param srcy0[in] Start source coordinate.
131 * \param flush_flag[in] Image blit flush flag.
132 * \return true iff successful.
133 */
134 static bool
135 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
136 __DRIimage *dst, __DRIimage *src,
137 int dstx0, int dsty0, int width, int height,
138 int srcx0, int srcy0, int flush_flag)
139 {
140 __DRIcontext *dri_context;
141 bool use_blit_context = false;
142
143 if (!loader_dri3_have_image_blit(draw))
144 return false;
145
146 dri_context = draw->vtable->get_dri_context(draw);
147
148 if (!dri_context || !draw->vtable->in_current_context(draw)) {
149 dri_context = loader_dri3_blit_context_get(draw);
150 use_blit_context = true;
151 flush_flag |= __BLIT_FLAG_FLUSH;
152 }
153
154 if (dri_context)
155 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
156 width, height, srcx0, srcy0,
157 width, height, flush_flag);
158
159 if (use_blit_context)
160 loader_dri3_blit_context_put();
161
162 return dri_context != NULL;
163 }
164
/* Reset the buffer's shared-memory fence to the untriggered state.
 * The connection parameter is unused but kept for symmetry with the other
 * fence helpers. */
static inline void
dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xshmfence_reset(buffer->shm_fence);
}
170
/* Trigger the buffer's fence locally (client side), marking it signaled. */
static inline void
dri3_fence_set(struct loader_dri3_buffer *buffer)
{
   xshmfence_trigger(buffer->shm_fence);
}
176
/* Ask the X server to trigger the buffer's sync fence once it has finished
 * with the buffer. */
static inline void
dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xcb_sync_trigger_fence(c, buffer->sync_fence);
}
182
/* Block until the buffer's fence is triggered. The connection is flushed
 * first so any pending trigger request actually reaches the server. */
static inline void
dri3_fence_await(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xcb_flush(c);
   xshmfence_await(buffer->shm_fence);
}
189
190 static void
191 dri3_update_num_back(struct loader_dri3_drawable *draw)
192 {
193 if (draw->flipping)
194 draw->num_back = 3;
195 else
196 draw->num_back = 2;
197 }
198
/* Set the drawable's swap interval and recompute the number of back
 * buffers accordingly. */
void
loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
{
   draw->swap_interval = interval;
   dri3_update_num_back(draw);
}
205
/** dri3_free_render_buffer
 *
 * Free everything associated with one render buffer including pixmap, fence
 * stuff and the driver image
 */
static void
dri3_free_render_buffer(struct loader_dri3_drawable *draw,
                        struct loader_dri3_buffer *buffer)
{
   /* The pixmap is only freed when we created it ourselves (not when it was
    * handed to us, e.g. for the real front buffer). */
   if (buffer->own_pixmap)
      xcb_free_pixmap(draw->conn, buffer->pixmap);
   xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
   xshmfence_unmap_shm(buffer->shm_fence);
   draw->ext->image->destroyImage(buffer->image);
   /* linear_buffer only exists in the different-gpu (prime) case. */
   if (buffer->linear_buffer)
      draw->ext->image->destroyImage(buffer->linear_buffer);
   free(buffer);
}
224
225 void
226 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
227 {
228 int i;
229
230 draw->ext->core->destroyDrawable(draw->dri_drawable);
231
232 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
233 if (draw->buffers[i])
234 dri3_free_render_buffer(draw, draw->buffers[i]);
235 }
236
237 if (draw->special_event) {
238 xcb_void_cookie_t cookie =
239 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
240 XCB_PRESENT_EVENT_MASK_NO_EVENT);
241
242 xcb_discard_reply(draw->conn, cookie.sequence);
243 xcb_unregister_for_special_event(draw->conn, draw->special_event);
244 }
245 }
246
247 int
248 loader_dri3_drawable_init(xcb_connection_t *conn,
249 xcb_drawable_t drawable,
250 __DRIscreen *dri_screen,
251 bool is_different_gpu,
252 const __DRIconfig *dri_config,
253 struct loader_dri3_extensions *ext,
254 const struct loader_dri3_vtable *vtable,
255 struct loader_dri3_drawable *draw)
256 {
257 xcb_get_geometry_cookie_t cookie;
258 xcb_get_geometry_reply_t *reply;
259 xcb_generic_error_t *error;
260 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
261 int swap_interval;
262
263 draw->conn = conn;
264 draw->ext = ext;
265 draw->vtable = vtable;
266 draw->drawable = drawable;
267 draw->dri_screen = dri_screen;
268 draw->is_different_gpu = is_different_gpu;
269
270 draw->have_back = 0;
271 draw->have_fake_front = 0;
272 draw->first_init = true;
273
274 draw->cur_blit_source = -1;
275 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
276
277 if (draw->ext->config)
278 draw->ext->config->configQueryi(draw->dri_screen,
279 "vblank_mode", &vblank_mode);
280
281 switch (vblank_mode) {
282 case DRI_CONF_VBLANK_NEVER:
283 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
284 swap_interval = 0;
285 break;
286 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
287 case DRI_CONF_VBLANK_ALWAYS_SYNC:
288 default:
289 swap_interval = 1;
290 break;
291 }
292 draw->swap_interval = swap_interval;
293
294 dri3_update_num_back(draw);
295
296 /* Create a new drawable */
297 draw->dri_drawable =
298 draw->ext->image_driver->createNewDrawable(dri_screen,
299 dri_config,
300 draw);
301
302 if (!draw->dri_drawable)
303 return 1;
304
305 cookie = xcb_get_geometry(draw->conn, draw->drawable);
306 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
307 if (reply == NULL || error != NULL) {
308 draw->ext->core->destroyDrawable(draw->dri_drawable);
309 return 1;
310 }
311
312 draw->width = reply->width;
313 draw->height = reply->height;
314 draw->depth = reply->depth;
315 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
316 free(reply);
317
318 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
319 if (draw->ext->core->base.version >= 2) {
320 (void )draw->ext->core->getConfigAttrib(dri_config,
321 __DRI_ATTRIB_SWAP_METHOD,
322 &draw->swap_method);
323 }
324
325 /*
326 * Make sure server has the same swap interval we do for the new
327 * drawable.
328 */
329 loader_dri3_set_swap_interval(draw, swap_interval);
330
331 return 0;
332 }
333
/*
 * Process one Present event and free it.
 *
 * Updates drawable geometry on CONFIGURE_NOTIFY, swap-completion bookkeeping
 * (recv_sbc, flip state, ust/msc) on COMPLETE_NOTIFY, and buffer busy state
 * on IDLE_NOTIFY.
 */
static void
dri3_handle_present_event(struct loader_dri3_drawable *draw,
                          xcb_present_generic_event_t *ge)
{
   switch (ge->evtype) {
   case XCB_PRESENT_CONFIGURE_NOTIFY: {
      xcb_present_configure_notify_event_t *ce = (void *) ge;

      /* Track window resizes so we render at the current size. */
      draw->width = ce->width;
      draw->height = ce->height;
      draw->vtable->set_drawable_size(draw, draw->width, draw->height);
      break;
   }
   case XCB_PRESENT_COMPLETE_NOTIFY: {
      xcb_present_complete_notify_event_t *ce = (void *) ge;

      /* Compute the processed SBC number from the received 32-bit serial number
       * merged with the upper 32-bits of the sent 64-bit serial number while
       * checking for wrap.
       */
      if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
         draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
         /* If the reconstructed value overtook send_sbc, the 32-bit serial
          * belongs to the previous wrap period; back off one period. */
         if (draw->recv_sbc > draw->send_sbc)
            draw->recv_sbc -= 0x100000000;
         switch (ce->mode) {
         case XCB_PRESENT_COMPLETE_MODE_FLIP:
            draw->flipping = true;
            break;
         case XCB_PRESENT_COMPLETE_MODE_COPY:
            draw->flipping = false;
            break;
         }
         /* Flip state may have changed; that affects how many back buffers
          * we keep around. */
         dri3_update_num_back(draw);

         if (draw->vtable->show_fps)
            draw->vtable->show_fps(draw, ce->ust);

         draw->ust = ce->ust;
         draw->msc = ce->msc;
      } else {
         /* Completion of a notify_msc request rather than a pixmap present. */
         draw->recv_msc_serial = ce->serial;
         draw->notify_ust = ce->ust;
         draw->notify_msc = ce->msc;
      }
      break;
   }
   case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
      xcb_present_idle_notify_event_t *ie = (void *) ge;
      int b;

      /* Mark the matching buffer idle; if it lies beyond the current
       * num_back window (e.g. after leaving flip mode), free it. */
      for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
         struct loader_dri3_buffer *buf = draw->buffers[b];

         if (buf && buf->pixmap == ie->pixmap) {
            buf->busy = 0;
            if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
               dri3_free_render_buffer(draw, buf);
               draw->buffers[b] = NULL;
            }
            break;
         }
      }
      break;
   }
   }
   free(ge);
}
404
405 static bool
406 dri3_wait_for_event(struct loader_dri3_drawable *draw)
407 {
408 xcb_generic_event_t *ev;
409 xcb_present_generic_event_t *ge;
410
411 xcb_flush(draw->conn);
412 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
413 if (!ev)
414 return false;
415 ge = (void *) ev;
416 dri3_handle_present_event(draw, ge);
417 return true;
418 }
419
420 /** loader_dri3_wait_for_msc
421 *
422 * Get the X server to send an event when the target msc/divisor/remainder is
423 * reached.
424 */
425 bool
426 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
427 int64_t target_msc,
428 int64_t divisor, int64_t remainder,
429 int64_t *ust, int64_t *msc, int64_t *sbc)
430 {
431 uint32_t msc_serial;
432
433 msc_serial = ++draw->send_msc_serial;
434 xcb_present_notify_msc(draw->conn,
435 draw->drawable,
436 msc_serial,
437 target_msc,
438 divisor,
439 remainder);
440
441 xcb_flush(draw->conn);
442
443 /* Wait for the event */
444 if (draw->special_event) {
445 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
446 if (!dri3_wait_for_event(draw))
447 return false;
448 }
449 }
450
451 *ust = draw->notify_ust;
452 *msc = draw->notify_msc;
453 *sbc = draw->recv_sbc;
454
455 return true;
456 }
457
458 /** loader_dri3_wait_for_sbc
459 *
460 * Wait for the completed swap buffer count to reach the specified
461 * target. Presumably the application knows that this will be reached with
462 * outstanding complete events, or we're going to be here awhile.
463 */
464 int
465 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
466 int64_t target_sbc, int64_t *ust,
467 int64_t *msc, int64_t *sbc)
468 {
469 /* From the GLX_OML_sync_control spec:
470 *
471 * "If <target_sbc> = 0, the function will block until all previous
472 * swaps requested with glXSwapBuffersMscOML for that window have
473 * completed."
474 */
475 if (!target_sbc)
476 target_sbc = draw->send_sbc;
477
478 while (draw->recv_sbc < target_sbc) {
479 if (!dri3_wait_for_event(draw))
480 return 0;
481 }
482
483 *ust = draw->ust;
484 *msc = draw->msc;
485 *sbc = draw->recv_sbc;
486 return 1;
487 }
488
489 /** loader_dri3_find_back
490 *
491 * Find an idle back buffer. If there isn't one, then
492 * wait for a present idle notify event from the X server
493 */
494 static int
495 dri3_find_back(struct loader_dri3_drawable *draw)
496 {
497 int b;
498 xcb_generic_event_t *ev;
499 xcb_present_generic_event_t *ge;
500 int num_to_consider = draw->num_back;
501
502 /* Increase the likelyhood of reusing current buffer */
503 dri3_flush_present_events(draw);
504
505 /* Check whether we need to reuse the current back buffer as new back.
506 * In that case, wait until it's not busy anymore.
507 */
508 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
509 num_to_consider = 1;
510 draw->cur_blit_source = -1;
511 }
512
513 for (;;) {
514 for (b = 0; b < num_to_consider; b++) {
515 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
516 struct loader_dri3_buffer *buffer = draw->buffers[id];
517
518 if (!buffer || !buffer->busy) {
519 draw->cur_back = id;
520 return id;
521 }
522 }
523 xcb_flush(draw->conn);
524 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
525 if (!ev)
526 return -1;
527 ge = (void *) ev;
528 dri3_handle_present_event(draw, ge);
529 }
530 }
531
532 static xcb_gcontext_t
533 dri3_drawable_gc(struct loader_dri3_drawable *draw)
534 {
535 if (!draw->gc) {
536 uint32_t v = 0;
537 xcb_create_gc(draw->conn,
538 (draw->gc = xcb_generate_id(draw->conn)),
539 draw->drawable,
540 XCB_GC_GRAPHICS_EXPOSURES,
541 &v);
542 }
543 return draw->gc;
544 }
545
546
/* Return the current back buffer slot (may be NULL if not yet allocated). */
static struct loader_dri3_buffer *
dri3_back_buffer(struct loader_dri3_drawable *draw)
{
   return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
}
552
/* Return the fake front buffer slot (may be NULL if not yet allocated). */
static struct loader_dri3_buffer *
dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
{
   return draw->buffers[LOADER_DRI3_FRONT_ID];
}
558
559 static void
560 dri3_copy_area(xcb_connection_t *c,
561 xcb_drawable_t src_drawable,
562 xcb_drawable_t dst_drawable,
563 xcb_gcontext_t gc,
564 int16_t src_x,
565 int16_t src_y,
566 int16_t dst_x,
567 int16_t dst_y,
568 uint16_t width,
569 uint16_t height)
570 {
571 xcb_void_cookie_t cookie;
572
573 cookie = xcb_copy_area_checked(c,
574 src_drawable,
575 dst_drawable,
576 gc,
577 src_x,
578 src_y,
579 dst_x,
580 dst_y,
581 width,
582 height);
583 xcb_discard_reply(c, cookie.sequence);
584 }
585
/**
 * Asks the driver to flush any queued work necessary for serializing with the
 * X command stream, and optionally the slightly more strict requirement of
 * glFlush() equivalence (which would require flushing even if nothing had
 * been drawn to a window system framebuffer, for example).
 */
void
loader_dri3_flush(struct loader_dri3_drawable *draw,
                  unsigned flags,
                  enum __DRI2throttleReason throttle_reason)
{
   /* There may be no current context (e.g. during teardown); in that case
    * there is nothing to flush. */
   __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);

   if (dri_context) {
      draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
                                         flags, throttle_reason);
   }
}
605
/**
 * Copy a sub-rectangle of the back buffer to the drawable (and to the fake
 * front, if one exists), synchronizing with the X server via fences.
 *
 * \param x,y           Lower-left origin of the region in GL coordinates
 *                      (y is flipped to X coordinates below).
 * \param width,height  Region size.
 * \param flush         Whether to request glFlush() equivalence.
 */
void
loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
                            int x, int y,
                            int width, int height,
                            bool flush)
{
   struct loader_dri3_buffer *back;
   unsigned flags = __DRI2_FLUSH_DRAWABLE;

   /* Check we have the right attachments */
   if (!draw->have_back || draw->is_pixmap)
      return;

   if (flush)
      flags |= __DRI2_FLUSH_CONTEXT;
   loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);

   back = dri3_find_back_alloc(draw);
   if (!back)
      return;

   /* Convert from GL (origin bottom-left) to X (origin top-left). */
   y = draw->height - y - height;

   if (draw->is_different_gpu) {
      /* Update the linear buffer part of the back buffer
       * for the dri3_copy_area operation
       */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
      /* We use blit_image to update our fake front,
       */
      if (draw->have_fake_front)
         (void) loader_dri3_blit_image(draw,
                                       dri3_fake_front_buffer(draw)->image,
                                       back->image,
                                       x, y, width, height,
                                       x, y, __BLIT_FLAG_FLUSH);
   }

   loader_dri3_swapbuffer_barrier(draw);
   /* Reset the fence, copy to the real front, and have the server trigger
    * the fence when the copy is done. */
   dri3_fence_reset(draw->conn, back);
   dri3_copy_area(draw->conn,
                  back->pixmap,
                  draw->drawable,
                  dri3_drawable_gc(draw),
                  x, y, x, y, width, height);
   dri3_fence_trigger(draw->conn, back);
   /* Refresh the fake front (if present) after we just damaged the real
    * front.
    */
   if (draw->have_fake_front && !draw->is_different_gpu) {
      dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
      dri3_copy_area(draw->conn,
                     back->pixmap,
                     dri3_fake_front_buffer(draw)->pixmap,
                     dri3_drawable_gc(draw),
                     x, y, x, y, width, height);
      dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
      dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
   }
   dri3_fence_await(draw->conn, back);
}
671
672 void
673 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
674 xcb_drawable_t dest,
675 xcb_drawable_t src)
676 {
677 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
678
679 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
680 dri3_copy_area(draw->conn,
681 src, dest,
682 dri3_drawable_gc(draw),
683 0, 0, 0, 0, draw->width, draw->height);
684 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
685 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
686 }
687
/* Bring the fake front buffer up to date with the real front (server side
 * content). No-op when there is no fake front. */
void
loader_dri3_wait_x(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *front;

   if (draw == NULL || !draw->have_fake_front)
      return;

   front = dri3_fake_front_buffer(draw);

   /* Copy real front -> fake front pixmap. */
   loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);

   /* In the psc->is_different_gpu case, the linear buffer has been updated,
    * but not yet the tiled buffer.
    * Copy back to the tiled buffer we use for rendering.
    * Note that we don't need flushing.
    */
   if (draw->is_different_gpu)
      (void) loader_dri3_blit_image(draw,
                                    front->image,
                                    front->linear_buffer,
                                    0, 0, front->width, front->height,
                                    0, 0, 0);
}
712
/* Push the fake front buffer's content out to the real front (server side).
 * No-op when there is no fake front. */
void
loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *front;

   if (draw == NULL || !draw->have_fake_front)
      return;

   front = dri3_fake_front_buffer(draw);

   /* In the psc->is_different_gpu case, we update the linear_buffer
    * before updating the real front.
    */
   if (draw->is_different_gpu)
      (void) loader_dri3_blit_image(draw,
                                    front->linear_buffer,
                                    front->image,
                                    0, 0, front->width, front->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   loader_dri3_swapbuffer_barrier(draw);
   /* Copy fake front pixmap -> real front. */
   loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
}
735
736 /** dri3_flush_present_events
737 *
738 * Process any present events that have been received from the X server
739 */
740 static void
741 dri3_flush_present_events(struct loader_dri3_drawable *draw)
742 {
743 /* Check to see if any configuration changes have occurred
744 * since we were last invoked
745 */
746 if (draw->special_event) {
747 xcb_generic_event_t *ev;
748
749 while ((ev = xcb_poll_for_special_event(draw->conn,
750 draw->special_event)) != NULL) {
751 xcb_present_generic_event_t *ge = (void *) ev;
752 dri3_handle_present_event(draw, ge);
753 }
754 }
755 }
756
/** loader_dri3_swap_buffers_msc
 *
 * Make the current back buffer visible using the present extension.
 *
 * \param target_msc/divisor/remainder  OML_sync_control scheduling triple;
 *        all zero means plain glXSwapBuffers() semantics.
 * \param flush_flags  Flags forwarded to the vtable's flush_drawable.
 * \param force_copy   Request back-buffer preservation (used by EGL).
 * \return the swap buffer count (SBC) of this swap, or 0 if nothing was
 *         presented.
 */
int64_t
loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
                             int64_t target_msc, int64_t divisor,
                             int64_t remainder, unsigned flush_flags,
                             bool force_copy)
{
   struct loader_dri3_buffer *back;
   int64_t ret = 0;
   uint32_t options = XCB_PRESENT_OPTION_NONE;

   draw->vtable->flush_drawable(draw, flush_flags);

   back = dri3_find_back_alloc(draw);

   if (draw->is_different_gpu && back) {
      /* Update the linear buffer before presenting the pixmap */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   /* If we need to preload the new back buffer, remember the source.
    * The force_copy parameter is used by EGL to attempt to preserve
    * the back buffer across a call to this function.
    */
   if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
      draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);

   /* Exchange the back and fake front. Even though the server knows about these
    * buffers, it has no notion of back and fake front.
    */
   if (back && draw->have_fake_front) {
      struct loader_dri3_buffer *tmp;

      tmp = dri3_fake_front_buffer(draw);
      draw->buffers[LOADER_DRI3_FRONT_ID] = back;
      draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;

      if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
         draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
   }

   dri3_flush_present_events(draw);

   if (back && !draw->is_pixmap) {
      dri3_fence_reset(draw->conn, back);

      /* Compute when we want the frame shown by taking the last known
       * successful MSC and adding in a swap interval for each outstanding swap
       * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
       * semantic"
       */
      ++draw->send_sbc;
      if (target_msc == 0 && divisor == 0 && remainder == 0)
         target_msc = draw->msc + draw->swap_interval *
                      (draw->send_sbc - draw->recv_sbc);
      else if (divisor == 0 && remainder > 0) {
         /* From the GLX_OML_sync_control spec:
          *     "If <divisor> = 0, the swap will occur when MSC becomes
          *      greater than or equal to <target_msc>."
          *
          * Note that there's no mention of the remainder.  The Present
          * extension throws BadValue for remainder != 0 with divisor == 0, so
          * just drop the passed in value.
          */
         remainder = 0;
      }

      /* From the GLX_EXT_swap_control spec
       * and the EGL 1.4 spec (page 53):
       *
       *     "If <interval> is set to a value of 0, buffer swaps are not
       *      synchronized to a video frame."
       *
       * Implementation note: It is possible to enable triple buffering
       * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
       * the default.
       */
      if (draw->swap_interval == 0)
         options |= XCB_PRESENT_OPTION_ASYNC;

      /* If we need to populate the new back, but need to reuse the back
       * buffer slot due to lack of local blit capabilities, make sure
       * the server doesn't flip and we deadlock.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
         options |= XCB_PRESENT_OPTION_COPY;

      back->busy = 1;
      back->last_swap = draw->send_sbc;
      xcb_present_pixmap(draw->conn,
                         draw->drawable,
                         back->pixmap,
                         (uint32_t) draw->send_sbc,
                         0,                                    /* valid */
                         0,                                    /* update */
                         0,                                    /* x_off */
                         0,                                    /* y_off */
                         None,                                 /* target_crtc */
                         None,
                         back->sync_fence,
                         options,
                         target_msc,
                         divisor,
                         remainder, 0, NULL);
      ret = (int64_t) draw->send_sbc;

      /* Schedule a server-side back-preserving blit if necessary.
       * This happens iff all conditions below are satisfied:
       * a) We have a fake front,
       * b) We need to preserve the back buffer,
       * c) We don't have local blit capabilities.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
          draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
         struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
         struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];

         dri3_fence_reset(draw->conn, new_back);
         dri3_copy_area(draw->conn, src->pixmap,
                        new_back->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0, draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_back);
         /* The preserved content is as old as the source's last swap. */
         new_back->last_swap = src->last_swap;
      }

      xcb_flush(draw->conn);
      if (draw->stamp)
         ++(*draw->stamp);
   }

   /* Our buffer set changed (or may have); force a getBuffers round-trip. */
   draw->ext->flush->invalidate(draw->dri_drawable);

   return ret;
}
899
900 int
901 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
902 {
903 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
904
905 if (!back || back->last_swap == 0)
906 return 0;
907
908 return draw->send_sbc - back->last_swap + 1;
909 }
910
911 /** loader_dri3_open
912 *
913 * Wrapper around xcb_dri3_open
914 */
915 int
916 loader_dri3_open(xcb_connection_t *conn,
917 xcb_window_t root,
918 uint32_t provider)
919 {
920 xcb_dri3_open_cookie_t cookie;
921 xcb_dri3_open_reply_t *reply;
922 int fd;
923
924 cookie = xcb_dri3_open(conn,
925 root,
926 provider);
927
928 reply = xcb_dri3_open_reply(conn, cookie, NULL);
929 if (!reply)
930 return -1;
931
932 if (reply->nfd != 1) {
933 free(reply);
934 return -1;
935 }
936
937 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
938 free(reply);
939 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
940
941 return fd;
942 }
943
944 static uint32_t
945 dri3_cpp_for_format(uint32_t format) {
946 switch (format) {
947 case __DRI_IMAGE_FORMAT_R8:
948 return 1;
949 case __DRI_IMAGE_FORMAT_RGB565:
950 case __DRI_IMAGE_FORMAT_GR88:
951 return 2;
952 case __DRI_IMAGE_FORMAT_XRGB8888:
953 case __DRI_IMAGE_FORMAT_ARGB8888:
954 case __DRI_IMAGE_FORMAT_ABGR8888:
955 case __DRI_IMAGE_FORMAT_XBGR8888:
956 case __DRI_IMAGE_FORMAT_XRGB2101010:
957 case __DRI_IMAGE_FORMAT_ARGB2101010:
958 case __DRI_IMAGE_FORMAT_SARGB8:
959 return 4;
960 case __DRI_IMAGE_FORMAT_NONE:
961 default:
962 return 0;
963 }
964 }
965
966 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
967 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
968 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
969 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
970 */
971 static int
972 image_format_to_fourcc(int format)
973 {
974
975 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
976 switch (format) {
977 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
978 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
979 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
980 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
981 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
982 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
983 }
984 return 0;
985 }
986
/** loader_dri3_alloc_render_buffer
 *
 * Use the driver createImage function to construct a __DRIimage, then
 * get a file descriptor for that and create an X pixmap from that
 *
 * Allocate an xshmfence for synchronization
 *
 * Returns the new buffer, or NULL on failure (all partially acquired
 * resources are released via the goto-cleanup chain at the bottom).
 */
static struct loader_dri3_buffer *
dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
                         int width, int height, int depth)
{
   struct loader_dri3_buffer *buffer;
   __DRIimage *pixmap_buffer;
   xcb_pixmap_t pixmap;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int buffer_fd, fence_fd;
   int stride;

   /* Create an xshmfence object and
    * prepare to send that to the X server
    */

   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      return NULL;

   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL)
      goto no_shm_fence;

   /* Allocate the image from the driver
    */
   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   buffer->cpp = dri3_cpp_for_format(format);
   if (!buffer->cpp)
      goto no_image;

   if (!draw->is_different_gpu) {
      /* Single-GPU: one shareable, scanout-capable image serves both
       * rendering and the X pixmap. */
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    __DRI_IMAGE_USE_SHARE |
                                                    __DRI_IMAGE_USE_SCANOUT |
                                                    __DRI_IMAGE_USE_BACKBUFFER,
                                                    buffer);
      pixmap_buffer = buffer->image;

      if (!buffer->image)
         goto no_image;
   } else {
      /* Prime (different GPU): render into a tiled image on the render
       * GPU, and share a separate linear image with the display GPU. */
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    0,
                                                    buffer);

      if (!buffer->image)
         goto no_image;

      buffer->linear_buffer =
         draw->ext->image->createImage(draw->dri_screen,
                                       width, height, format,
                                       __DRI_IMAGE_USE_SHARE |
                                       __DRI_IMAGE_USE_LINEAR |
                                       __DRI_IMAGE_USE_BACKBUFFER,
                                       buffer);
      pixmap_buffer = buffer->linear_buffer;

      if (!buffer->linear_buffer)
         goto no_linear_buffer;
   }

   /* X wants the stride, so ask the image for it
    */
   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
                                     &stride))
      goto no_buffer_attrib;

   buffer->pitch = stride;

   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
                                     &buffer_fd))
      goto no_buffer_attrib;

   /* NOTE(review): buffer->size is never assigned before this call, so the
    * calloc-zeroed value 0 is passed as the buffer size — confirm whether
    * the server derives the size from the fd or whether this should be
    * height * pitch. */
   xcb_dri3_pixmap_from_buffer(draw->conn,
                               (pixmap = xcb_generate_id(draw->conn)),
                               draw->drawable,
                               buffer->size,
                               width, height, buffer->pitch,
                               depth, buffer->cpp * 8,
                               buffer_fd);

   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   buffer->pixmap = pixmap;
   buffer->own_pixmap = true;
   buffer->sync_fence = sync_fence;
   buffer->shm_fence = shm_fence;
   buffer->width = width;
   buffer->height = height;

   /* Mark the buffer as idle
    */
   dri3_fence_set(buffer);

   return buffer;

no_buffer_attrib:
   draw->ext->image->destroyImage(pixmap_buffer);
no_linear_buffer:
   /* In the prime case pixmap_buffer is the linear image, so the tiled
    * render image still needs destroying separately. */
   if (draw->is_different_gpu)
      draw->ext->image->destroyImage(buffer->image);
no_image:
   free(buffer);
no_buffer:
   xshmfence_unmap_shm(shm_fence);
no_shm_fence:
   close(fence_fd);
   return NULL;
}
1115
1116 /** loader_dri3_update_drawable
1117 *
1118 * Called the first time we use the drawable and then
1119 * after we receive present configure notify events to
1120 * track the geometry of the drawable
1121 */
1122 static int
1123 dri3_update_drawable(__DRIdrawable *driDrawable,
1124 struct loader_dri3_drawable *draw)
1125 {
1126 if (draw->first_init) {
1127 xcb_get_geometry_cookie_t geom_cookie;
1128 xcb_get_geometry_reply_t *geom_reply;
1129 xcb_void_cookie_t cookie;
1130 xcb_generic_error_t *error;
1131 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1132 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1133
1134 draw->first_init = false;
1135
1136 /* Try to select for input on the window.
1137 *
1138 * If the drawable is a window, this will get our events
1139 * delivered.
1140 *
1141 * Otherwise, we'll get a BadWindow error back from this request which
1142 * will let us know that the drawable is a pixmap instead.
1143 */
1144
1145 draw->eid = xcb_generate_id(draw->conn);
1146 cookie =
1147 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1148 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1149 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1150 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1151
1152 present_capabilities_cookie =
1153 xcb_present_query_capabilities(draw->conn, draw->drawable);
1154
1155 /* Create an XCB event queue to hold present events outside of the usual
1156 * application event queue
1157 */
1158 draw->special_event = xcb_register_for_special_xge(draw->conn,
1159 &xcb_present_id,
1160 draw->eid,
1161 draw->stamp);
1162 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1163
1164 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1165
1166 if (!geom_reply)
1167 return false;
1168
1169 draw->width = geom_reply->width;
1170 draw->height = geom_reply->height;
1171 draw->depth = geom_reply->depth;
1172 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1173
1174 free(geom_reply);
1175
1176 draw->is_pixmap = false;
1177
1178 /* Check to see if our select input call failed. If it failed with a
1179 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1180 * special event queue created above and mark the drawable as a pixmap
1181 */
1182
1183 error = xcb_request_check(draw->conn, cookie);
1184
1185 present_capabilities_reply =
1186 xcb_present_query_capabilities_reply(draw->conn,
1187 present_capabilities_cookie,
1188 NULL);
1189
1190 if (present_capabilities_reply) {
1191 draw->present_capabilities = present_capabilities_reply->capabilities;
1192 free(present_capabilities_reply);
1193 } else
1194 draw->present_capabilities = 0;
1195
1196 if (error) {
1197 if (error->error_code != BadWindow) {
1198 free(error);
1199 return false;
1200 }
1201 draw->is_pixmap = true;
1202 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1203 draw->special_event = NULL;
1204 }
1205 }
1206 dri3_flush_present_events(draw);
1207 return true;
1208 }
1209
1210 __DRIimage *
1211 loader_dri3_create_image(xcb_connection_t *c,
1212 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1213 unsigned int format,
1214 __DRIscreen *dri_screen,
1215 const __DRIimageExtension *image,
1216 void *loaderPrivate)
1217 {
1218 int *fds;
1219 __DRIimage *image_planar, *ret;
1220 int stride, offset;
1221
1222 /* Get an FD for the pixmap object
1223 */
1224 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1225
1226 stride = bp_reply->stride;
1227 offset = 0;
1228
1229 /* createImageFromFds creates a wrapper __DRIimage structure which
1230 * can deal with multiple planes for things like Yuv images. So, once
1231 * we've gotten the planar wrapper, pull the single plane out of it and
1232 * discard the wrapper.
1233 */
1234 image_planar = image->createImageFromFds(dri_screen,
1235 bp_reply->width,
1236 bp_reply->height,
1237 image_format_to_fourcc(format),
1238 fds, 1,
1239 &stride, &offset, loaderPrivate);
1240 close(fds[0]);
1241 if (!image_planar)
1242 return NULL;
1243
1244 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1245
1246 image->destroyImage(image_planar);
1247
1248 return ret;
1249 }
1250
/** dri3_get_pixmap_buffer
 *
 * Get the DRM object for a pixmap from the X server and
 * wrap that with a __DRIimage structure using createImageFromFds
 *
 * Returns the cached buffer for this slot if one already exists;
 * otherwise allocates one backed by the drawable's own pixmap
 * (own_pixmap = false: we did not create the pixmap and must not free it).
 * Returns NULL on allocation or protocol failure.
 */
static struct loader_dri3_buffer *
dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
                       enum loader_dri3_buffer_type buffer_type,
                       struct loader_dri3_drawable *draw)
{
   int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
   struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
   xcb_drawable_t pixmap;
   xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
   xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int fence_fd;

   /* Already wrapped this pixmap once — reuse the cached buffer. */
   if (buffer)
      return buffer;

   pixmap = draw->drawable;

   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   /* Shared-memory fence used to synchronize with the server. */
   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      goto no_fence;
   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL) {
      close (fence_fd);
      goto no_fence;
   }

   /* Hand the fence fd to the server; after this call the fd is owned by
    * the X connection, so the error paths below must not close it again.
    */
   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
   bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
   if (!bp_reply)
      goto no_image;

   buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
                                            draw->dri_screen, draw->ext->image,
                                            buffer);
   if (!buffer->image)
      goto no_image;

   buffer->pixmap = pixmap;
   buffer->own_pixmap = false;    /* pixmap belongs to the drawable, not us */
   buffer->width = bp_reply->width;
   buffer->height = bp_reply->height;
   buffer->shm_fence = shm_fence;
   buffer->sync_fence = sync_fence;

   draw->buffers[buf_id] = buffer;

   free(bp_reply);

   return buffer;

no_image:
   /* bp_reply may be NULL here; free(NULL) is a no-op. */
   free(bp_reply);
   xcb_sync_destroy_fence(draw->conn, sync_fence);
   xshmfence_unmap_shm(shm_fence);
no_fence:
   free(buffer);
no_buffer:
   return NULL;
}
1327
/** dri3_get_buffer
 *
 * Find a front or back buffer, allocating new ones as necessary.
 *
 * On a size change the old buffer's contents are copied into the new one,
 * synchronized with fences. For back buffers, if a pending blit source is
 * recorded in draw->cur_blit_source, its contents are copied into the
 * returned buffer so the application sees preserved content.
 * Returns NULL if no back slot is available or allocation fails.
 */
static struct loader_dri3_buffer *
dri3_get_buffer(__DRIdrawable *driDrawable,
                unsigned int format,
                enum loader_dri3_buffer_type buffer_type,
                struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *buffer;
   int buf_id;

   if (buffer_type == loader_dri3_buffer_back) {
      /* Remember the format so a back buffer can be recreated later
       * (see dri3_find_back_alloc).
       */
      draw->back_format = format;

      buf_id = dri3_find_back(draw);

      if (buf_id < 0)
         return NULL;
   } else {
      buf_id = LOADER_DRI3_FRONT_ID;
   }

   buffer = draw->buffers[buf_id];

   /* Allocate a new buffer if there isn't an old one, or if that
    * old one is the wrong size
    */
   if (!buffer || buffer->width != draw->width ||
       buffer->height != draw->height) {
      struct loader_dri3_buffer *new_buffer;

      /* Allocate the new buffers
       */
      new_buffer = dri3_alloc_render_buffer(draw,
                                            format,
                                            draw->width,
                                            draw->height,
                                            draw->depth);
      if (!new_buffer)
         return NULL;

      /* When resizing, copy the contents of the old buffer, waiting for that
       * copy to complete using our fences before proceeding
       */
      switch (buffer_type) {
      case loader_dri3_buffer_back:
         if (buffer) {
            if (!buffer->linear_buffer) {
               /* Same-GPU case: server-side copy between pixmaps,
                * bracketed by our fence so we can wait for completion.
                */
               dri3_fence_reset(draw->conn, new_buffer);
               dri3_fence_await(draw->conn, buffer);
               dri3_copy_area(draw->conn,
                              buffer->pixmap,
                              new_buffer->pixmap,
                              dri3_drawable_gc(draw),
                              0, 0, 0, 0,
                              draw->width, draw->height);
               dri3_fence_trigger(draw->conn, new_buffer);
            } else if (draw->vtable->in_current_context(draw)) {
               /* Different-GPU case: blit on our device; only possible
                * when the drawable is bound to the current context.
                */
               (void) loader_dri3_blit_image(draw,
                                             new_buffer->image,
                                             buffer->image,
                                             0, 0, draw->width, draw->height,
                                             0, 0, 0);
            }
            dri3_free_render_buffer(draw, buffer);
         }
         break;
      case loader_dri3_buffer_front:
         /* Wait for outstanding swaps before reading from the real front. */
         loader_dri3_swapbuffer_barrier(draw);
         dri3_fence_reset(draw->conn, new_buffer);
         dri3_copy_area(draw->conn,
                        draw->drawable,
                        new_buffer->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0,
                        draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_buffer);

         /* Prime case: also bring the copied content across to the
          * render GPU's tiled image.
          */
         if (new_buffer->linear_buffer &&
             draw->vtable->in_current_context(draw)) {
            dri3_fence_await(draw->conn, new_buffer);
            (void) loader_dri3_blit_image(draw,
                                          new_buffer->image,
                                          new_buffer->linear_buffer,
                                          0, 0, draw->width, draw->height,
                                          0, 0, 0);
         }
         break;
      }
      buffer = new_buffer;
      draw->buffers[buf_id] = buffer;
   }
   /* Make sure the buffer is idle before handing it to the client. */
   dri3_fence_await(draw->conn, buffer);

   /*
    * Do we need to preserve the content of a previous buffer?
    *
    * Note that this blit is needed only to avoid a wait for a buffer that
    * is currently in the flip chain or being scanned out from. That's really
    * a tradeoff. If we're ok with the wait we can reduce the number of back
    * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
    * but in the latter case we must disallow page-flipping.
    */
   if (buffer_type == loader_dri3_buffer_back &&
       draw->cur_blit_source != -1 &&
       draw->buffers[draw->cur_blit_source] &&
       buffer != draw->buffers[draw->cur_blit_source]) {

      struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];

      /* Avoid flushing here. Will probably do good for tiling hardware. */
      (void) loader_dri3_blit_image(draw,
                                    buffer->image,
                                    source->image,
                                    0, 0, draw->width, draw->height,
                                    0, 0, 0);
      buffer->last_swap = source->last_swap;
      draw->cur_blit_source = -1;
   }
   /* Return the requested buffer */
   return buffer;
}
1452
1453 /** dri3_free_buffers
1454 *
1455 * Free the front bufffer or all of the back buffers. Used
1456 * when the application changes which buffers it needs
1457 */
1458 static void
1459 dri3_free_buffers(__DRIdrawable *driDrawable,
1460 enum loader_dri3_buffer_type buffer_type,
1461 struct loader_dri3_drawable *draw)
1462 {
1463 struct loader_dri3_buffer *buffer;
1464 int first_id;
1465 int n_id;
1466 int buf_id;
1467
1468 switch (buffer_type) {
1469 case loader_dri3_buffer_back:
1470 first_id = LOADER_DRI3_BACK_ID(0);
1471 n_id = LOADER_DRI3_MAX_BACK;
1472 break;
1473 case loader_dri3_buffer_front:
1474 first_id = LOADER_DRI3_FRONT_ID;
1475 n_id = 1;
1476 }
1477
1478 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1479 buffer = draw->buffers[buf_id];
1480 if (buffer) {
1481 dri3_free_render_buffer(draw, buffer);
1482 draw->buffers[buf_id] = NULL;
1483 }
1484 }
1485 }
1486
1487 /** loader_dri3_get_buffers
1488 *
1489 * The published buffer allocation API.
1490 * Returns all of the necessary buffers, allocating
1491 * as needed.
1492 */
1493 int
1494 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1495 unsigned int format,
1496 uint32_t *stamp,
1497 void *loaderPrivate,
1498 uint32_t buffer_mask,
1499 struct __DRIimageList *buffers)
1500 {
1501 struct loader_dri3_drawable *draw = loaderPrivate;
1502 struct loader_dri3_buffer *front, *back;
1503
1504 buffers->image_mask = 0;
1505 buffers->front = NULL;
1506 buffers->back = NULL;
1507
1508 front = NULL;
1509 back = NULL;
1510
1511 if (!dri3_update_drawable(driDrawable, draw))
1512 return false;
1513
1514 /* pixmaps always have front buffers.
1515 * Exchange swaps also mandate fake front buffers.
1516 */
1517 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1518 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1519
1520 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1521 /* All pixmaps are owned by the server gpu.
1522 * When we use a different gpu, we can't use the pixmap
1523 * as buffer since it is potentially tiled a way
1524 * our device can't understand. In this case, use
1525 * a fake front buffer. Hopefully the pixmap
1526 * content will get synced with the fake front
1527 * buffer.
1528 */
1529 if (draw->is_pixmap && !draw->is_different_gpu)
1530 front = dri3_get_pixmap_buffer(driDrawable,
1531 format,
1532 loader_dri3_buffer_front,
1533 draw);
1534 else
1535 front = dri3_get_buffer(driDrawable,
1536 format,
1537 loader_dri3_buffer_front,
1538 draw);
1539
1540 if (!front)
1541 return false;
1542 } else {
1543 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1544 draw->have_fake_front = 0;
1545 }
1546
1547 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1548 back = dri3_get_buffer(driDrawable,
1549 format,
1550 loader_dri3_buffer_back,
1551 draw);
1552 if (!back)
1553 return false;
1554 draw->have_back = 1;
1555 } else {
1556 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1557 draw->have_back = 0;
1558 }
1559
1560 if (front) {
1561 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1562 buffers->front = front->image;
1563 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1564 }
1565
1566 if (back) {
1567 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1568 buffers->back = back->image;
1569 }
1570
1571 draw->stamp = stamp;
1572
1573 return true;
1574 }
1575
1576 /** loader_dri3_update_drawable_geometry
1577 *
1578 * Get the current drawable geometry.
1579 */
1580 void
1581 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1582 {
1583 xcb_get_geometry_cookie_t geom_cookie;
1584 xcb_get_geometry_reply_t *geom_reply;
1585
1586 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1587
1588 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1589
1590 if (geom_reply) {
1591 draw->width = geom_reply->width;
1592 draw->height = geom_reply->height;
1593 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1594
1595 free(geom_reply);
1596 }
1597 }
1598
/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t dummy_ust, dummy_msc, dummy_sbc;

   /* Waiting for swap-buffer-count 0 blocks until outstanding swaps are
    * done; the returned counters are not needed here.
    */
   (void) loader_dri3_wait_for_sbc(draw, 0, &dummy_ust, &dummy_msc,
                                   &dummy_sbc);
}
1613
1614 /**
1615 * Perform any cleanup associated with a close screen operation.
1616 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1617 *
1618 * This function destroys the screen's cached swap context if any.
1619 */
1620 void
1621 loader_dri3_close_screen(__DRIscreen *dri_screen)
1622 {
1623 mtx_lock(&blit_context.mtx);
1624 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1625 blit_context.core->destroyContext(blit_context.ctx);
1626 blit_context.ctx = NULL;
1627 }
1628 mtx_unlock(&blit_context.mtx);
1629 }
1630
1631 /**
1632 * Find a backbuffer slot - potentially allocating a back buffer
1633 *
1634 * \param draw[in,out] Pointer to the drawable for which to find back.
1635 * \return Pointer to a new back buffer or NULL if allocation failed or was
1636 * not mandated.
1637 *
1638 * Find a potentially new back buffer, and if it's not been allocated yet and
1639 * in addition needs initializing, then try to allocate and initialize it.
1640 */
1641 static struct loader_dri3_buffer *
1642 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1643 {
1644 struct loader_dri3_buffer *back;
1645 int id;
1646
1647 id = dri3_find_back(draw);
1648 back = (id >= 0) ? draw->buffers[id] : NULL;
1649
1650 if (back || (id >= 0 && draw->back_format != __DRI_IMAGE_FORMAT_NONE)) {
1651 if (dri3_update_drawable(draw->dri_drawable, draw)) {
1652 (void) dri3_get_buffer(draw->dri_drawable,
1653 draw->back_format,
1654 loader_dri3_buffer_back,
1655 draw);
1656 back = (id >= 0) ? draw->buffers[id] : NULL;
1657 }
1658 }
1659
1660 return back;
1661 }