{
gc->majorOpcode = __glXSetupForCommand(psc->display->dpy);
if (!gc->majorOpcode)
- return GL_FALSE;
+ return False;
gc->screen = psc->scr;
gc->psc = psc;
gc->isDirect = GL_TRUE;
gc->currentContextTag = -1;
- return GL_TRUE;
+ return True;
}
/* Init the extension. This fetches the major and minor version. */
priv = __glXInitialize(dpy);
if (!priv)
- return GL_FALSE;
+ return False;
if (major)
*major = priv->majorVersion;
if (minor)
*minor = priv->minorVersion;
- return GL_TRUE;
+ return True;
}
/*
* \param dpy Display where the context was created.
* \param contextID ID of the context to be tested.
*
- * \returns \c GL_TRUE if the context is direct rendering or not.
+ * \returns \c True if the context supports direct rendering, \c False otherwise.
*/
static Bool
__glXIsDirect(Display * dpy, GLXContextID contextID)
opcode = __glXSetupForCommand(dpy);
if (!opcode) {
- return GL_FALSE;
+ return False;
}
c = XGetXCBConnection(dpy);
/**
* \todo
- * Shouldn't this function \b always return \c GL_FALSE when
+ * Shouldn't this function \b always return \c False when
* \c GLX_DIRECT_RENDERING is not defined? Do we really need to bother with
* the GLX protocol here at all?
*/
struct glx_context *gc = (struct glx_context *) gc_user;
if (!gc) {
- return GL_FALSE;
+ return False;
}
else if (gc->isDirect) {
- return GL_TRUE;
+ return True;
}
#ifdef GLX_USE_APPLEGL /* TODO: indirect on darwin */
- return GL_FALSE;
+ return False;
#else
return __glXIsDirect(dpy, gc->xid);
#endif
** supported by the OpenGL implementation on the server.
*/
if ((status == GLX_BAD_VISUAL) && (attribute == GLX_USE_GL)) {
- *value_return = GL_FALSE;
+ *value_return = False;
status = Success;
}