@@ ... @@ _eglParseSurfaceAttribList
err = EGL_BAD_ATTRIBUTE;
break;
}
- surf->RenderBuffer = val;
+ surf->RequestedRenderBuffer = val;
break;
case EGL_POST_SUB_BUFFER_SUPPORTED_NV:
if (!dpy->Extensions.NV_post_sub_buffer ||
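For reference, the value the hunk above stores comes from the attrib_list
supplied at surface-creation time. A minimal client-side sketch, assuming
dpy, config, and native_window already exist:

    /* Request single-buffered rendering on a window surface; the parse
     * code above records the request in surf->RequestedRenderBuffer
     * rather than acting on it directly. */
    static const EGLint surf_attribs[] = {
       EGL_RENDER_BUFFER, EGL_SINGLE_BUFFER,
       EGL_NONE,
    };

    EGLSurface win_surf =
       eglCreateWindowSurface(dpy, config, native_window, surf_attribs);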
@@ ... @@ _eglInitSurface
surf->TextureTarget = EGL_NO_TEXTURE;
surf->MipmapTexture = EGL_FALSE;
surf->LargestPbuffer = EGL_FALSE;
- surf->RenderBuffer = renderBuffer;
+ surf->RequestedRenderBuffer = renderBuffer;
+ surf->ActiveRenderBuffer = renderBuffer;
surf->VGAlphaFormat = EGL_VG_ALPHA_FORMAT_NONPRE;
surf->VGColorspace = EGL_VG_COLORSPACE_sRGB;
surf->GLColorspace = EGL_GL_COLORSPACE_LINEAR_KHR;
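Both new fields start out equal here; splitting the old RenderBuffer member
in two lets the requested value and the actually-active value diverge later
(presumably for cases like EGL_KHR_mutable_render_buffer; that motivation is
an assumption, the patch itself only shows the split). A sketch of how a
client observes the difference, assuming a bound context ctx and a surface
surf:

    EGLint requested, active;

    /* Reports the requested buffer (RequestedRenderBuffer). */
    eglQuerySurface(dpy, surf, EGL_RENDER_BUFFER, &requested);

    /* Reports the buffer a context actually renders to; per the spec
     * quote below, this is the query for the "actual" buffer. */
    eglQueryContext(dpy, ctx, EGL_RENDER_BUFFER, &active);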
@@ ... @@ _eglQuerySurface
*value = surface->SwapBehavior;
break;
case EGL_RENDER_BUFFER:
- *value = surface->RenderBuffer;
+ /* From the EGL 1.5 spec (2014.08.27):
+ *
+ * Querying EGL_RENDER_BUFFER returns the buffer which client API
+ * rendering is requested to use. For a window surface, this is the
+ * same attribute value specified when the surface was created. For
+ * a pbuffer surface, it is always EGL_BACK_BUFFER. For a pixmap
+ * surface, it is always EGL_SINGLE_BUFFER. To determine the actual
+ * buffer being rendered to by a context, call eglQueryContext.
+ */
+ switch (surface->Type) {
+ default:
+ unreachable("bad EGLSurface type");
+ case EGL_WINDOW_BIT:
+ *value = surface->RequestedRenderBuffer;
+ break;
+ case EGL_PBUFFER_BIT:
+ *value = EGL_BACK_BUFFER;
+ break;
+ case EGL_PIXMAP_BIT:
+ *value = EGL_SINGLE_BUFFER;
+ break;
+ }
break;
case EGL_PIXEL_ASPECT_RATIO:
*value = surface->AspectRatio;
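With the new switch in place, the result of querying EGL_RENDER_BUFFER
depends only on the surface type, exactly as the spec quote describes. A
minimal usage sketch, assuming dpy and the three surfaces already exist:

    EGLint rb;

    /* Window: returns the value requested at creation time
     * (EGL_BACK_BUFFER unless the attrib_list said otherwise). */
    eglQuerySurface(dpy, window_surface, EGL_RENDER_BUFFER, &rb);

    /* Pbuffer: always EGL_BACK_BUFFER. */
    eglQuerySurface(dpy, pbuffer_surface, EGL_RENDER_BUFFER, &rb);

    /* Pixmap: always EGL_SINGLE_BUFFER. */
    eglQuerySurface(dpy, pixmap_surface, EGL_RENDER_BUFFER, &rb);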