When I try to compile GLSL shaders for my application on macOS, compilation fails with a shader version error:
Error compiling vertex shader:
ERROR: 0:1: '' : version '130' is not supported
Error compiling shader:
ERROR: 0:1: '' : version '130' is not supported
The shaders are as follows:
Vertex shader:
#version 130
in vec2 in_vPos;
in vec2 in_vTexCoord;
out vec2 s_vTexCoord;
void main()
{
    gl_Position = vec4(in_vPos, 0, 1);
    s_vTexCoord = (in_vTexCoord + vec2(1, 1)) / 2;
}
Fragment shader:
#version 130
in vec2 s_vTexCoord;
out vec4 s_colOut;
uniform sampler2DRect s_texSampler;
uniform vec4 s_colBlend;
void main()
{
    vec4 pixel = texture(s_texSampler, s_vTexCoord * textureSize(s_texSampler));
    s_colOut = s_colBlend * pixel;
}
This is how I initialize my SDL renderer and GLEW:
ren_pRenderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, 1);
SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, 4);
glEnable(GL_MULTISAMPLE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE | SDL_GL_CONTEXT_FORWARD_COMPATIBLE_FLAG);
SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "1");
static bool _bInitializedGlew = false;
if(!_bInitializedGlew) {
    glewExperimental = GL_TRUE;
    GLenum err = glewInit();
    if(err != GLEW_OK) {
        printf("Glew initialization error: %d\n", err);
    }
    _bInitializedGlew = true;
}
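For reference, a minimal diagnostic I can run right after this initialization to see what the context actually provides (a sketch using standard glGetString queries):
// Diagnostic sketch: query what the driver actually gave us.
printf("GL_VERSION: %s\n", (const char*)glGetString(GL_VERSION));
printf("GL_RENDERER: %s\n", (const char*)glGetString(GL_RENDERER));
printf("GL_SHADING_LANGUAGE_VERSION: %s\n", (const char*)glGetString(GL_SHADING_LANGUAGE_VERSION));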
And this is how I compile the shaders:
unsigned int CShader::CompileShader(const char* str, int type, bool &bSuccess)
{
    // create a new shader
    GLuint iShader = glCreateShader(type);
    const GLchar* aSourceVertex[] = { str };
    glShaderSource(iShader, 1, aSourceVertex, NULL);
    glCompileShader(iShader);
    // check if compiling went okay
    GLint bShaderCompiled = GL_FALSE;
    glGetShaderiv(iShader, GL_COMPILE_STATUS, &bShaderCompiled);
    if(bShaderCompiled != GL_TRUE) {
        // it did not.
        printf("Error compiling %sshader:\n", (type == GL_VERTEX_SHADER ? "vertex " : ""));
        int iLogLength = 0;
        int iMaxLength = 0;
        glGetShaderiv(iShader, GL_INFO_LOG_LENGTH, &iMaxLength);
        char* buffer = new char[iMaxLength];
        glGetShaderInfoLog(iShader, iMaxLength, &iLogLength, buffer);
        if(iLogLength > 0) {
            printf("%s\n", buffer);
        }
        delete[] buffer;
        // report it back
        bSuccess = false;
        // and delete the shader
        glDeleteShader(iShader);
    } else {
        // it worked!
        bSuccess = true;
    }
    // return shader
    return iShader;
}
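For completeness, the compiled shaders are then linked into a program along these lines (a simplified sketch of the linking step; the LinkProgram helper name is illustrative and error handling is trimmed):
// Simplified sketch of the linking step (helper name is illustrative).
GLuint LinkProgram(GLuint iVertexShader, GLuint iFragmentShader)
{
    GLuint iProgram = glCreateProgram();
    glAttachShader(iProgram, iVertexShader);
    glAttachShader(iProgram, iFragmentShader);
    glLinkProgram(iProgram);
    // check the link status the same way the compile status is checked
    GLint bLinked = GL_FALSE;
    glGetProgramiv(iProgram, GL_LINK_STATUS, &bLinked);
    if(bLinked != GL_TRUE) {
        printf("Error linking program\n");
    }
    return iProgram;
}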
I looked around for answers and couldn't find a conclusive Stack Overflow answer, though one did point me in the right direction with SDL_GL_SetAttribute for setting the GL version and core profile. So in the code above, I set it to 3.1 Core, which, according to Wikipedia, corresponds to #version 140 (GL 3.0 → GLSL 130, 3.1 → 140, 3.2 → 150), but even with that I get the same error:
Error compiling vertex shader:
ERROR: 0:1: '' : version '140' is not supported
Error compiling shader:
ERROR: 0:1: '' : version '140' is not supported
Edit: I changed the version in the attributes to 3.2 Core + Forward Compatible, and now, using #version 150, I get the same issue, saying 150 is not supported.
When I print the result of glGetString(GL_SHADING_LANGUAGE_VERSION), I get 1.20.
Even if I explicitly create the context after creating the SDL window, it still reports 1.20:
win_pWindow = SDL_CreateWindow(strTitle, iX, iY, width, height, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN | ulFlags);
win_pContext = SDL_GL_CreateContext(win_pWindow);
printf("GLSL version: %s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));
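Reading the attributes back after context creation (a small diagnostic sketch using SDL_GL_GetAttribute) should show whether the requested version was actually applied:
// Diagnostic sketch: read back what the created context actually has.
int iMajor = 0, iMinor = 0, iProfile = 0;
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, &iMajor);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, &iMinor);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, &iProfile);
printf("Context: %d.%d, profile mask: 0x%x\n", iMajor, iMinor, iProfile);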