X11: Stop including X11 headers from Clonk headers

X11 headers #define common symbols like Status and Success, which leads to
very puzzling error messages when they are included from unrelated Clonk code.
Branch: stable-5.2
Author: Günther Brammer
Date: 2009-06-05 19:57:37 +02:00
Parent: 760df013ba
Commit: ad3c2509ca
3 changed files with 13 additions and 13 deletions
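To see why this matters (a minimal sketch, not part of the patch): Xlib.h and X.h #define short names such as Bool, Status, True, False and Success, so any translation unit that pulls in an X11 header has those identifiers rewritten by the preprocessor, and the resulting compile errors point far away from the real cause.

#include <X11/Xlib.h>   // also pulls in X.h; Success is now the macro 0, Status the macro int

// Hypothetical Clonk-style declaration compiled after the include above:
enum LoadResult { Failure, Success };   // preprocessed to { Failure, 0 } -> "expected identifier" error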

@@ -24,14 +24,6 @@
 #include <GL/glew.h>
-#ifdef USE_X11
-// Xmd.h typedefs BOOL to CARD8, but we want int
-#define BOOL _BOOL
-#include <X11/Xmd.h>
-#undef BOOL
-#include <GL/glx.h>
-#endif
 #if defined(__APPLE__)
 #include <OpenGL/glu.h>
 #else
@@ -69,7 +61,7 @@ class CStdGLCtx
 HWND hWindow; // used if pWindow==NULL
 HDC hDC; // device context handle
#elif defined(USE_X11)
-GLXContext ctx;
+/*GLXContext*/void * ctx;
 #endif
 int cx,cy; // context window size
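
The header-side change above uses the usual opaque-handle trick: GLXContext is itself a pointer type, so the header can store it as a plain void * and stay free of <GL/glx.h>; only the implementation file casts it back at each GLX call, as the .cpp hunks below do. A minimal sketch, with made-up names (GLContextHolder, Destroy) rather than the actual Clonk classes:

// Header side: the GLX context is kept as an opaque pointer, so no <GL/glx.h> is needed.
struct GLContextHolder
{
	GLContextHolder(): ctx(0) { }
	/*GLXContext*/ void * ctx; // real type is only spelled out in the .cpp
};

// Implementation side: the only place that includes <GL/glx.h>, casting back at every call.
#include <GL/glx.h>

void Destroy(Display *dpy, GLContextHolder &holder)
{
	if (holder.ctx)
	{
		glXDestroyContext(dpy, (GLXContext)holder.ctx);
		holder.ctx = 0;
	}
}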

@@ -193,7 +193,12 @@ bool CStdGL::ApplyGammaRamp(D3DGAMMARAMP &ramp, bool fForce)
#elif defined(USE_X11)
+// Xmd.h typedefs BOOL to CARD8, whereas microsoft windows and Clonk use int
+#define BOOL _BOOL
+#include <X11/Xmd.h>
+#include <GL/glx.h>
 #include <X11/extensions/xf86vmode.h>
+#undef BOOL
 CStdGLCtx::CStdGLCtx(): pWindow(0), ctx(0), cx(0), cy(0) { }
@@ -202,7 +207,7 @@ void CStdGLCtx::Clear()
 Deselect();
 if (ctx)
 {
-glXDestroyContext(pWindow->dpy, ctx);
+glXDestroyContext(pWindow->dpy, (GLXContext)ctx);
 ctx = 0;
 }
 pWindow = 0;
@@ -218,10 +223,10 @@ bool CStdGLCtx::Init(CStdWindow * pWindow, CStdApp *)
 // Create Context with sharing (if this is the main context, our ctx will be 0, so no sharing)
 // try direct rendering first
 if (!DDrawCfg.NoAcceleration)
-ctx = glXCreateContext(pWindow->dpy, (XVisualInfo*)pWindow->Info, pGL->MainCtx.ctx, True);
+ctx = glXCreateContext(pWindow->dpy, (XVisualInfo*)pWindow->Info, (GLXContext)pGL->MainCtx.ctx, True);
 // without, rendering will be unacceptable slow, but that's better than nothing at all
 if (!ctx)
-ctx = glXCreateContext(pWindow->dpy, (XVisualInfo*)pWindow->Info, pGL->MainCtx.ctx, False);
+ctx = glXCreateContext(pWindow->dpy, (XVisualInfo*)pWindow->Info, (GLXContext)pGL->MainCtx.ctx, False);
 // No luck at all?
 if (!ctx) return pGL->Error(" gl: Unable to create context");
 if (!Select(true)) return pGL->Error(" gl: Unable to select context");
@@ -249,7 +254,7 @@ bool CStdGLCtx::Select(bool verbose)
 return false;
 }
 // make context current
-if (!pWindow->renderwnd || !glXMakeCurrent(pWindow->dpy, pWindow->renderwnd, ctx))
+if (!pWindow->renderwnd || !glXMakeCurrent(pWindow->dpy, pWindow->renderwnd, (GLXContext)ctx))
 {
 if (verbose) pGL->Error(" gl: glXMakeCurrent failed");
 return false;

@@ -30,11 +30,14 @@
 #include "../../engine/inc/C4Version.h"
 #ifdef USE_X11
+#define BOOL _BOOL
 #include "../../engine/res/c4x.xpm"
 #include <X11/Xlib.h>
 #include <X11/xpm.h>
 #include <X11/Xatom.h>
 #include <X11/extensions/xf86vmode.h>
+#include <GL/glx.h>
+#undef BOOL
 #endif
 #include <string>
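
The #define BOOL _BOOL / #undef BOOL pair added around the X11 includes is a rename guard: Xmd.h, which some of these headers drag in, typedefs BOOL to the 8-bit CARD8, while Clonk's Windows-compatibility layer expects an int-sized BOOL. Temporarily renaming the identifier keeps the two definitions apart. A minimal standalone sketch of the trick (assuming the project defines BOOL as int elsewhere):

typedef int BOOL;        // the project's own BOOL, as on Windows

#define BOOL _BOOL       // make the X11 headers declare "_BOOL" instead of "BOOL"
#include <X11/Xmd.h>     // Xmd.h's BOOL typedef (to CARD8) now declares _BOOL instead
#undef BOOL              // from here on, BOOL is the project's int typedef again

BOOL fDone = 0;          // int-sized, untouched by Xmd.h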