Fix a name collision on Ubuntu; allow forcing shaders off by passing -1 as the shader index
parent 4f4452dcc2
commit 18fa1c9cde
5 changed files with 13 additions and 8 deletions
@@ -55,6 +55,7 @@ class CFont;
 extern char ForceUseOpenGL;
 extern bool UseOpenGL;
 extern bool ZoomNoResize;
+extern bool GLShaderPipelineSupported;
 #endif
 
 class CGraphic : public gcn::Image
@@ -480,6 +480,7 @@ static void Usage()
 	"\t-x\t\tControls fullscreen scaling if your graphics card supports shaders.\n"\
 	"\t \t\tPass 1 for nearest-neigubour, 2 for EPX/AdvMame, 3 for HQx, 4 for SAL, 5 for SuperEagle\n"\
 	"\t \t\tYou can also use Ctrl+Alt+/ to cycle between these scaling algorithms at runtime.\n"
+	"\t \t\tPass -1 to force old-school nearest neighbour scaling without shaders\n"\
 	"\t-Z\t\tUse OpenGL to scale the screen to the viewport (retro-style). Implies -O.\n"
 #endif
 	"map is relative to StratagusLibPath=datapath, use ./map for relative to cwd\n",
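With this addition the help text covers the opt-out as well as the five algorithms. For illustration (assuming the stock stratagus binary name), the two modes would be invoked like so:

	stratagus -x 3     # fullscreen scaling through the HQx shader
	stratagus -x -1    # no shader pipeline, plain nearest-neighbour scaling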
@@ -634,6 +635,9 @@ void ParseCommandLine(int argc, char **argv, Parameters &parameters)
 #if defined(USE_OPENGL) || defined(USE_GLES)
 		case 'x':
 			ShaderIndex = atoi(optarg) % MAX_SHADERS;
+			if (atoi(optarg) == -1) {
+				GLShaderPipelineSupported = false;
+			}
 			continue;
 		case 'Z':
 			ForceUseOpenGL = 1;
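A side note on the arithmetic above: ShaderIndex is an unsigned global (see the @@ -385 hunk below), and in C++ -1 % MAX_SHADERS evaluates to -1, which wraps to a huge value when stored. The commit avoids relying on that index by switching the pipeline off entirely, after which ShaderIndex should never be consulted. A standalone sketch; MAX_SHADERS = 5 is an assumed value mirroring the five algorithms listed in Usage():

	#include <cstdio>
	#include <cstdlib>

	static const int MAX_SHADERS = 5; // assumption for illustration; the real constant is not in this diff

	int main()
	{
		const char *optarg = "-1";              // what getopt hands over for "-x -1"
		int raw = atoi(optarg);
		// -1 % 5 == -1 in C++ (truncation toward zero); storing it in an
		// unsigned wraps, so the index itself is useless for -1...
		unsigned ShaderIndex = raw % MAX_SHADERS;
		bool GLShaderPipelineSupported = true;
		// ...which is why the commit disables the whole pipeline instead.
		if (raw == -1) {
			GLShaderPipelineSupported = false;
		}
		printf("ShaderIndex=%u, shaders %s\n", ShaderIndex,
		       GLShaderPipelineSupported ? "on" : "off");
		return 0;
	}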
@@ -123,7 +123,6 @@ GLint GLMaxTextureSize = 256; /// Max texture size supported on the video card
 GLint GLMaxTextureSizeOverride; /// User-specified limit for ::GLMaxTextureSize
 bool GLTextureCompressionSupported; /// Is OpenGL texture compression supported
 bool UseGLTextureCompression; /// Use OpenGL texture compression
-bool GLShaderPipelineSupported;
 #endif
 
 static std::map<int, std::string> Key2Str;
@@ -259,7 +258,7 @@ static void InitOpenGLExtensions()
 		GLTextureCompressionSupported = false;
 	}
 
-	GLShaderPipelineSupported = LoadShaderExtensions();
+	GLShaderPipelineSupported = GLShaderPipelineSupported && LoadShaderExtensions();
 #else
 	GLTextureCompressionSupported = false;
 	GLShaderPipelineSupported = false;
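The self-referential && is doing two jobs: it honours a false pre-set by ParseCommandLine(), and because && short-circuits, LoadShaderExtensions() is never even called in that case. A minimal sketch of the behaviour, with a stand-in body for LoadShaderExtensions():

	#include <cstdio>

	static bool GLShaderPipelineSupported = true; // default; "-x -1" flips it to false

	// Stand-in for the real loader, which resolves the GL entry points via
	// SDL_GL_GetProcAddress and compiles the shaders.
	static bool LoadShaderExtensions()
	{
		puts("resolving shader extensions...");
		return true;
	}

	int main()
	{
		GLShaderPipelineSupported = false; // as if the user passed -x -1
		// Short-circuit: the right-hand side never runs when the flag is
		// already false, so no GL lookups happen at all.
		GLShaderPipelineSupported = GLShaderPipelineSupported && LoadShaderExtensions();
		printf("shaders %s\n", GLShaderPipelineSupported ? "enabled" : "disabled");
		return 0;
	}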
@@ -335,7 +335,7 @@ PFNGLGETPROGRAMIVPROC glGetProgramiv;
 PFNGLGETSHADERINFOLOGPROC glGetShaderInfoLog;
 PFNGLGETPROGRAMINFOLOGPROC glGetProgramInfoLog;
 PFNGLGETUNIFORMLOCATIONPROC glGetUniformLocation;
-PFNGLACTIVETEXTUREPROC glActiveTexture;
+PFNGLACTIVETEXTUREPROC glActiveTextureProc;
 PFNGLUNIFORM1FPROC glUniform1f;
 PFNGLUNIFORM1IPROC glUniform1i;
 
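This rename is the Ubuntu fix from the commit title. glActiveTexture has been core OpenGL since 1.3, and the Mesa <GL/gl.h> shipped on Ubuntu declares it as a real function, so a file-scope function-pointer variable with the same name is a conflicting redeclaration. A hedged sketch of the clash (simplified signatures, not the engine's actual headers):

	// What the system header effectively provides on Ubuntu/Mesa:
	extern "C" void glActiveTexture(unsigned int texture);

	typedef void (*PFNGLACTIVETEXTUREPROC)(unsigned int texture);

	// PFNGLACTIVETEXTUREPROC glActiveTexture;   // error: redeclared as a different kind of symbol
	PFNGLACTIVETEXTUREPROC glActiveTextureProc;  // OK: distinct name, filled in at runtime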
@@ -385,7 +385,7 @@ void printProgramInfoLog(GLuint obj, const char* prefix)
 	}
 }
 
-extern unsigned ShaderIndex = 0;
+unsigned ShaderIndex = 0;
 
 extern void LoadShaders() {
 	GLuint vs, fs;
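The dropped extern was doing nothing useful: extern combined with an initializer is still a definition, and GCC warns about it ("initialized and declared 'extern'"). The plain definition is the idiomatic form, with any extern declaration living in a header instead:

	extern unsigned BadIndex = 0; // still a definition; GCC warns about the 'extern'
	unsigned GoodIndex = 0;       // the form the commit switches to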
@@ -427,7 +427,7 @@ extern bool LoadShaderExtensions() {
 	glDeleteShader = (PFNGLDELETESHADERPROC)(uintptr_t)SDL_GL_GetProcAddress("glDeleteShader");
 
 	glGetUniformLocation = (PFNGLGETUNIFORMLOCATIONPROC)(uintptr_t)SDL_GL_GetProcAddress("glGetUniformLocation");
-	glActiveTexture = (PFNGLACTIVETEXTUREPROC)(uintptr_t)SDL_GL_GetProcAddress("glActiveTexture");
+	glActiveTextureProc = (PFNGLACTIVETEXTUREPROC)(uintptr_t)SDL_GL_GetProcAddress("glActiveTexture");
 	glUniform1f = (PFNGLUNIFORM1FPROC)(uintptr_t)SDL_GL_GetProcAddress("glUniform1f");
 	glUniform1i = (PFNGLUNIFORM1IPROC)(uintptr_t)SDL_GL_GetProcAddress("glUniform1i");
 
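Note that only the C identifier changes here; the lookup string handed to SDL_GL_GetProcAddress stays "glActiveTexture", since that is the name of the symbol the driver exports.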
@@ -441,7 +441,7 @@ extern bool LoadShaderExtensions() {
 	glDrawBuffers = (PFNGLDRAWBUFFERSPROC)(uintptr_t)SDL_GL_GetProcAddress("glDrawBuffers");
 	glCheckFramebufferStatus = (PFNGLCHECKFRAMEBUFFERSTATUSEXTPROC)(uintptr_t)SDL_GL_GetProcAddress("glCheckFramebufferStatus");
 
-	if (glCreateShader && glGenFramebuffers && glGetUniformLocation && glActiveTexture) {
+	if (glCreateShader && glGenFramebuffers && glGetUniformLocation && glActiveTextureProc) {
 		LoadShaders();
 		return true;
 	} else {
@@ -482,7 +482,7 @@ extern void RenderFramebufferToScreen() {
 	glUniform1f(widthrelloc, (float)Video.Width / (float)Video.ViewportWidth);
 	glUniform1f(heightrelloc, (float)Video.Height / (float)Video.ViewportHeight);
 	glUniform1i(textureloc, 0);
-	glActiveTexture(GL_TEXTURE0);
+	glActiveTextureProc(GL_TEXTURE0);
 	// render the framebuffer texture to a fullscreen quad on the real display
 	glBindTexture(GL_TEXTURE_2D, fullscreenTexture);
 	glBegin(GL_QUADS);
@@ -499,4 +499,4 @@ extern void RenderFramebufferToScreen() {
 	glUseProgram(0); // Disable shaders again, and render to framebuffer again
 	glBindFramebuffer(GL_FRAMEBUFFER_EXT, fullscreenFramebuffer);
 }
-#endif
\ No newline at end of file
+#endif
@@ -170,6 +170,7 @@ CVideo Video;
 char ForceUseOpenGL;
 bool UseOpenGL; /// Use OpenGL
 bool ZoomNoResize;
+bool GLShaderPipelineSupported = true;
 #endif
 
 char VideoForceFullScreen; /// fullscreen set from commandline
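The definition lands here with a default of true; in its old home next to the other GL globals (the @@ -123,7 hunk above) it was overwritten unconditionally, so its starting value never mattered, but with the new && form in InitOpenGLExtensions() it has to start out true. Since command-line parsing presumably runs before video initialization, -x -1 can flip the flag to false ahead of time, and the shader setup is then skipped via the short-circuit shown earlier.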