Reputation: 1472
I'm running Mac OS X Lion and I'm trying to write a basic OpenGL program, but my fragment shader isn't working. When I don't include it, I get my black triangle, but when I do, the screen is just white. I also get no errors when loading it. What is the best way to debug this? Here are my shaders:
Vertex:
#version 120
attribute vec2 coord2d;
void main(void) {
    gl_Position = vec4(coord2d, 0.0, 1.0);
}
Fragment:
#version 120
void main(void) {
    gl_FragColor[0] = 1.0;
    gl_FragColor[1] = 1.0;
    gl_FragColor[2] = 0.0;
}
The code I use to load my shaders comes from this tutorial.
Edited to add more information:
int init_resources()
{
    GLfloat triangle_vertices[] = {
         0.0f,  0.8f,
        -0.8f, -0.8f,
         0.8f, -0.8f,
    };
    glGenBuffers(1, &vbo_triangle);
    glBindBuffer(GL_ARRAY_BUFFER, vbo_triangle);
    glBufferData(GL_ARRAY_BUFFER, sizeof(triangle_vertices), triangle_vertices, GL_STATIC_DRAW);

    GLint link_ok = GL_FALSE;
    GLuint vs, fs;
    if ((vs = create_shader("vertShader.sh", GL_VERTEX_SHADER)) == 0) return 0;
    if ((fs = create_shader("fragShader.sh", GL_FRAGMENT_SHADER)) == 0) return 0;

    program = glCreateProgram();
    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    glGetProgramiv(program, GL_LINK_STATUS, &link_ok);
    if (!link_ok) {
        fprintf(stderr, "glLinkProgram:");
        print_log(program);
        return 0;
    }

    const char* attribute_name = "coord2d";
    attribute_coord2d = glGetAttribLocation(program, attribute_name);
    if (attribute_coord2d == -1) {
        fprintf(stderr, "Could not bind attribute %s\n", attribute_name);
        return 0;
    }
    return 1;
}
void onDisplay()
{
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glUseProgram(program);
    glEnableVertexAttribArray(attribute_coord2d);
    glBindBuffer(GL_ARRAY_BUFFER, vbo_triangle);
    glVertexAttribPointer(
        attribute_coord2d, // attribute location
        2,                 // number of elements per vertex (x, y)
        GL_FLOAT,          // element type
        GL_FALSE,          // do not normalize
        0,                 // no extra stride between vertices
        0                  // offset of the first element
    );
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(attribute_coord2d);
    glutSwapBuffers();
}
GLuint create_shader(const char* filename, GLenum type)
{
    const GLchar* source = file_read(filename);
    if (source == NULL) {
        fprintf(stderr, "Error opening %s: ", filename);
        perror("");
        return 0;
    }
    GLuint res = glCreateShader(type);
    glShaderSource(res, 1, &source, NULL); // glShaderSource expects a pointer to the string
    free((void*)source);
    glCompileShader(res);

    GLint compile_ok = GL_FALSE;
    glGetShaderiv(res, GL_COMPILE_STATUS, &compile_ok);
    if (compile_ok == GL_FALSE) {
        fprintf(stderr, "%s:", filename);
        print_log(res);
        glDeleteShader(res);
        return 0;
    }
    return res;
}
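print_log comes from the same tutorial; a minimal sketch of such a helper (assuming it simply prints the GL info log of a shader or program object, with <stdio.h>, <stdlib.h>, and the GL headers included) would look like this:

void print_log(GLuint object)
{
    /* Print the info log of a shader or program object so
       compile/link errors become visible. */
    GLint log_length = 0;
    if (glIsShader(object))
        glGetShaderiv(object, GL_INFO_LOG_LENGTH, &log_length);
    else if (glIsProgram(object))
        glGetProgramiv(object, GL_INFO_LOG_LENGTH, &log_length);
    else {
        fprintf(stderr, "print_log: not a shader or a program\n");
        return;
    }
    if (log_length <= 1) return; /* empty log */

    char* log = (char*)malloc(log_length);
    if (glIsShader(object))
        glGetShaderInfoLog(object, log_length, NULL, log);
    else
        glGetProgramInfoLog(object, log_length, NULL, log);
    fprintf(stderr, "%s", log);
    free(log);
}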
Upvotes: 3
Views: 2706
Reputation: 633
It seems you've found your solution in the meantime, but I wanted to give you another tip that helped me (also quite new to all this) a lot recently, particularly since you are working on Mac OS X:
I suggest you install the "Graphics Tools for Xcode" (available from the developer downloads section), which will help you a lot in developing your shaders, in particular:
- OpenGL Shader Builder, which helps you develop and test shaders.
- OpenGL Profiler, which not only helps you profile your application but also lets you see what resources are loaded and so on. In your case, you can set a breakpoint to stop execution (for example, before swapping buffers), then view the shader that is loaded and modify it at runtime to see what effect the change has.
Here is the User Guide for OpenGL Profiler: https://developer.apple.com/library/mac/#documentation/GraphicsImaging/Conceptual/OpenGLProfilerUserGuide/Introduction/Introduction.html#//apple_ref/doc/uid/TP40006475-CH1-DontLinkElementID_31
Hope that helps.
Upvotes: 2
Reputation: 1472
Had to add gl_FragColor[3] = 1.0; as I wasn't setting the alpha (opacity) component, which left it undefined.
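For reference, the full corrected fragment shader (assigning the whole color as one vec4 would be equivalent):

#version 120
void main(void) {
    gl_FragColor[0] = 1.0;
    gl_FragColor[1] = 1.0;
    gl_FragColor[2] = 0.0;
    gl_FragColor[3] = 1.0; // alpha: fully opaque
}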
Upvotes: 2