#ifdef GLX_DIRECT_RENDERING
#ifdef GLX_USE_APPLEGL
-#include "apple_glx_context.h"
-#include "apple_glx.h"
+#include "apple/apple_glx_context.h"
+#include "apple/apple_glx.h"
#else
#include <sys/time.h>
#ifdef XF86VIDMODE
#include <X11/extensions/xf86vmode.h>
#endif
-#include "xf86dri.h"
#endif
-#else
#endif
#include <X11/Xlib-xcb.h>
/* Check to see if the GL is supported on this screen */
*ppsc = (*ppriv)->screens[scrn];
- if ((*ppsc)->configs == NULL) {
+ if ((*ppsc)->configs == NULL && (*ppsc)->visuals == NULL) {
/* No support for GL on this screen regardless of visual */
return GLX_BAD_VISUAL;
}
return NULL;
}
+/**
+ * Verifies that a GLX context render type is compatible with the given
+ * GLX FBConfig, i.e. that the corresponding render-type bit is set in the
+ * config's GLX_RENDER_TYPE bitmask.
+ *
+ * \param config GLX FBConfig whose renderType bitmask is checked.
+ * \param renderType The context render type (GLX_*_TYPE enum) to verify.
+ * \return True if \c renderType is supported by \c config, False (0) if it
+ * is unsupported or not a recognized render type value.
+ */
+Bool
+validate_renderType_against_config(const struct glx_config *config,
+                                   int renderType)
+{
+    switch (renderType) {
+        case GLX_RGBA_TYPE:
+            return (config->renderType & GLX_RGBA_BIT) != 0;
+        case GLX_COLOR_INDEX_TYPE:
+            return (config->renderType & GLX_COLOR_INDEX_BIT) != 0;
+        case GLX_RGBA_FLOAT_TYPE_ARB:
+            return (config->renderType & GLX_RGBA_FLOAT_BIT_ARB) != 0;
+        case GLX_RGBA_UNSIGNED_FLOAT_TYPE_EXT:
+            return (config->renderType & GLX_RGBA_UNSIGNED_FLOAT_BIT_EXT) != 0;
+        default:
+            break;
+    }
+    return 0;
+}
+
_X_HIDDEN Bool
glx_context_init(struct glx_context *gc,
struct glx_screen *psc, struct glx_config *config)
gc->share_xid = shareList ? shareList->xid : None;
gc->imported = GL_FALSE;
- gc->renderType = renderType;
return (GLXContext) gc;
}
#if defined(GLX_DIRECT_RENDERING) || defined(GLX_USE_APPLEGL)
struct glx_screen *const psc = GetGLXScreenConfigs(dpy, vis->screen);
- config = glx_config_find_visual(psc->visuals, vis->visualid);
+ if (psc)
+ config = glx_config_find_visual(psc->visuals, vis->visualid);
+
if (config == NULL) {
xError error;
return None;
}
- renderType = config->rgbMode ? GLX_RGBA_TYPE : GLX_COLOR_INDEX_TYPE;
+ /* Choose the context render type based on DRI config values. It is
+ * unusual to set this type from config, but we have no other choice, as
+ * this old API does not provide renderType parameter.
+ */
+ if (config->renderType & GLX_RGBA_FLOAT_BIT_ARB) {
+ renderType = GLX_RGBA_FLOAT_TYPE_ARB;
+ } else if (config->renderType & GLX_RGBA_UNSIGNED_FLOAT_BIT_EXT) {
+ renderType = GLX_RGBA_UNSIGNED_FLOAT_TYPE_EXT;
+ } else if (config->renderType & GLX_RGBA_BIT) {
+ renderType = GLX_RGBA_TYPE;
+ } else if (config->renderType & GLX_COLOR_INDEX_BIT) {
+ renderType = GLX_COLOR_INDEX_TYPE;
+ } else if (config->rgbMode) {
+ /* If we're here, then renderType is not set correctly. Let's use a
+ * safeguard - any TrueColor or DirectColor mode is RGB mode. Such
+ * default value is needed by old DRI drivers, which didn't set
+ * renderType correctly as the value was just ignored.
+ */
+ renderType = GLX_RGBA_TYPE;
+ } else {
+ /* Safeguard - only one option left, all non-RGB modes are indexed
+ * modes. Again, this allows drivers with invalid renderType to work
+ * properly.
+ */
+ renderType = GLX_COLOR_INDEX_TYPE;
+ }
#endif
return CreateContext(dpy, vis->visualid, config, shareList, allowDirect,
}
/*
-** Query the existance of the GLX extension
+** Query the existence of the GLX extension
*/
_X_EXPORT Bool
glXQueryExtension(Display * dpy, int *errorBase, int *eventBase)
GLXPixmap xid;
CARD8 opcode;
+#if defined(GLX_DIRECT_RENDERING) && !defined(GLX_USE_APPLEGL)
+ struct glx_display *const priv = __glXInitialize(dpy);
+
+ if (priv == NULL)
+ return None;
+#endif
+
opcode = __glXSetupForCommand(dpy);
if (!opcode) {
return None;
/* FIXME: Maybe delay __DRIdrawable creation until the drawable
* is actually bound to a context... */
- struct glx_display *const priv = __glXInitialize(dpy);
__GLXDRIdrawable *pdraw;
struct glx_screen *psc;
struct glx_config *config;
struct glx_display *const priv = __glXInitialize(dpy);
__GLXDRIdrawable *pdraw = GetGLXDRIDrawable(dpy, glxpixmap);
- if (pdraw != NULL) {
+ if (priv != NULL && pdraw != NULL) {
(*pdraw->destroyDrawable) (pdraw);
__glxHashDelete(priv->drawHash, glxpixmap);
}
config->visualID = (XID) GLX_DONT_CARE;
config->visualType = GLX_DONT_CARE;
- /* glXChooseFBConfig specifies different defaults for these two than
+ /* glXChooseFBConfig specifies different defaults for these properties than
* glXChooseVisual.
*/
if (fbconfig_style_tags) {
config->rgbMode = GL_TRUE;
config->doubleBufferMode = GLX_DONT_CARE;
+ config->renderType = GLX_RGBA_BIT;
}
+ config->drawableType = GLX_WINDOW_BIT;
config->visualRating = GLX_DONT_CARE;
config->transparentPixel = GLX_NONE;
config->transparentRed = GLX_DONT_CARE;
config->transparentAlpha = GLX_DONT_CARE;
config->transparentIndex = GLX_DONT_CARE;
- config->drawableType = GLX_WINDOW_BIT;
- config->renderType =
- (config->rgbMode) ? GLX_RGBA_BIT : GLX_COLOR_INDEX_BIT;
config->xRenderable = GLX_DONT_CARE;
config->fbconfigID = (GLXFBConfigID) (GLX_DONT_CARE);
fbconfig_compare(struct glx_config **a, struct glx_config **b)
{
/* The order of these comparisons must NOT change. It is defined by
- * the GLX 1.3 spec and ARB_multisample.
+ * the GLX 1.4 specification.
*/
PREFER_SMALLER(visualSelectGroup);
PREFER_SMALLER(numAuxBuffers);
+ PREFER_SMALLER(sampleBuffers);
+ PREFER_SMALLER(samples);
+
PREFER_LARGER_OR_ZERO(depthBits);
PREFER_SMALLER(stencilBits);
PREFER_SMALLER(visualType);
- /* None of the multisample specs say where this comparison should happen,
- * so I put it near the end.
- */
- PREFER_SMALLER(sampleBuffers);
- PREFER_SMALLER(samples);
-
/* None of the pbuffer or fbconfig specs say that this comparison needs
* to happen at all, but it seems like it should.
*/
return *str;
}
-void
-__glXClientInfo(Display * dpy, int opcode)
-{
- char *ext_str = __glXGetClientGLExtensionString();
- int size = strlen(ext_str) + 1;
-
- xcb_connection_t *c = XGetXCBConnection(dpy);
- xcb_glx_client_info(c,
- GLX_MAJOR_VERSION, GLX_MINOR_VERSION, size, ext_str);
-
- free(ext_str);
-}
-
/*
** EXT_import_context
uint32_t screen = 0;
Bool got_screen = False;
+ if (priv == NULL)
+ return NULL;
+
/* The GLX_EXT_import_context spec says:
*
* "If <contextID> does not refer to a valid context, then a BadContext
psc = GetGLXScreenConfigs( gc->currentDpy, gc->screen);
#ifdef GLX_DIRECT_RENDERING
- if (gc->isDirect && psc->driScreen && psc->driScreen->setSwapInterval) {
+ if (gc->isDirect && psc && psc->driScreen &&
+ psc->driScreen->setSwapInterval) {
__GLXDRIdrawable *pdraw =
GetGLXDRIDrawable(gc->currentDpy, gc->currentDrawable);
psc->driScreen->setSwapInterval(pdraw, interval);
struct glx_screen *psc;
psc = GetGLXScreenConfigs( gc->currentDpy, gc->screen);
- if (psc->driScreen && psc->driScreen->setSwapInterval) {
+ if (psc && psc->driScreen && psc->driScreen->setSwapInterval) {
__GLXDRIdrawable *pdraw =
GetGLXDRIDrawable(gc->currentDpy, gc->currentDrawable);
return psc->driScreen->setSwapInterval(pdraw, interval);
struct glx_screen *psc;
psc = GetGLXScreenConfigs( gc->currentDpy, gc->screen);
- if (psc->driScreen && psc->driScreen->getSwapInterval) {
+ if (psc && psc->driScreen && psc->driScreen->getSwapInterval) {
__GLXDRIdrawable *pdraw =
GetGLXDRIDrawable(gc->currentDpy, gc->currentDrawable);
return psc->driScreen->getSwapInterval(pdraw);
* FIXME: documentation for the GLX encoding.
*/
#ifdef GLX_DIRECT_RENDERING
- if (psc->driScreen && psc->driScreen->getDrawableMSC) {
+ if (psc && psc->driScreen && psc->driScreen->getDrawableMSC) {
ret = psc->driScreen->getDrawableMSC(psc, pdraw, &ust, &msc, &sbc);
*count = (unsigned) msc;
return (ret == True) ? 0 : GLX_BAD_CONTEXT;
#endif
#ifdef GLX_DIRECT_RENDERING
- if (psc->driScreen && psc->driScreen->waitForMSC) {
+ if (psc && psc->driScreen && psc->driScreen->waitForMSC) {
ret = psc->driScreen->waitForMSC(pdraw, 0, divisor, remainder, &ust, &msc,
&sbc);
*count = (unsigned) msc;
#if defined(GLX_DIRECT_RENDERING) && !defined(GLX_USE_APPLEGL)
_X_HIDDEN GLboolean
-__glxGetMscRate(__GLXDRIdrawable *glxDraw,
+__glxGetMscRate(struct glx_screen *psc,
int32_t * numerator, int32_t * denominator)
{
#ifdef XF86VIDMODE
- struct glx_screen *psc;
XF86VidModeModeLine mode_line;
int dot_clock;
int i;
- psc = glxDraw->psc;
if (XF86VidModeQueryVersion(psc->dpy, &i, &i) &&
XF86VidModeGetModeLine(psc->dpy, psc->scr, &dot_clock, &mode_line)) {
unsigned n = dot_clock * 1000;
if (draw == NULL)
return False;
- return __glxGetMscRate(draw, numerator, denominator);
+ return __glxGetMscRate(draw->psc, numerator, denominator);
#else
(void) dpy;
(void) drawable;
#endif /* GLX_USE_APPLEGL */
-/**
- * \c strdup is actually not a standard ANSI C or POSIX routine.
- * Irix will not define it if ANSI mode is in effect.
- *
- * \sa strdup
- */
-_X_HIDDEN char *
-__glXstrdup(const char *str)
-{
- char *copy;
- copy = malloc(strlen(str) + 1);
- if (!copy)
- return NULL;
- strcpy(copy, str);
- return copy;
-}
-
/*
** glXGetProcAddress support
*/
GLX_FUNCTION2(glXReleaseTexImageEXT, __glXReleaseTexImageEXT),
#endif
-#if defined(GLX_DIRECT_RENDERING) && !defined(GLX_USE_APPLEGL)
+#if defined(GLX_DIRECT_RENDERING) && defined(GLX_USE_DRM)
/*** DRI configuration ***/
GLX_FUNCTION(glXGetScreenDriver),
GLX_FUNCTION(glXGetDriverConfig),
/*** GLX_ARB_create_context and GLX_ARB_create_context_profile ***/
GLX_FUNCTION(glXCreateContextAttribsARB),
+ /*** GLX_MESA_query_renderer ***/
+ GLX_FUNCTION(glXQueryRendererIntegerMESA),
+ GLX_FUNCTION(glXQueryRendererStringMESA),
+ GLX_FUNCTION(glXQueryCurrentRendererIntegerMESA),
+ GLX_FUNCTION(glXQueryCurrentRendererStringMESA),
+
{NULL, NULL} /* end of list */
};