Reputation: 43
Yes, this is another "Black screen in OpenGL, help!" post. I have done my due diligence, here is what I have tried so far to diagnose:
My program is a dead simple one that sets up a buffer to render a triangle (Actually there's much more scaffolding since this is adapted from a previous renderer I had in d3d11, but I pared it down and down until it was basically just tutorial code). In Renderdoc you can see the vertex positions coming through no problem, and it seems to understand that the fragment shader is linked and executing as well, yet all I see is a black screen.
If anybody knows any gotchas or debugging steps, please by all means throw your ideas at me. I'll post my main rendering code, shaders, and a couple screens from renderdoc so you can see I'm not crazy. Also I know there's some stuff in here that wouldn't make sense to do per-frame. This is just as bare bones simple as I could get it to sanity check. My shader loading code and window init code is bog standard so I won't bother to post it, but if anybody thinks it would help to see I'm happy to.
main loop
/* Per-frame body of the main loop: sets up a VAO/VBO holding one triangle
 * and draws it. The author notes this is deliberately un-factored sanity-check
 * code; the review comments below flag what would need to move for real use. */
glViewport(0, 0, 300, 300);
ASSERT(glGetError() == GL_NO_ERROR);
/* NOTE(review): a fresh VAO is generated every frame and never deleted —
 * this leaks a GL object per frame. Belongs in one-time init. */
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
ASSERT(glGetError() == GL_NO_ERROR);
glBindVertexArray(VertexArrayID);
ASSERT(glGetError() == GL_NO_ERROR);
/* One triangle in clip-space-sized coordinates (x, y, z per vertex). */
static const GLfloat g_vertex_buffer_data[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
/* NOTE(review): same per-frame leak as the VAO — the VBO is generated and
 * filled every frame and never deleted. */
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
ASSERT(glGetError() == GL_NO_ERROR);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
ASSERT(glGetError() == GL_NO_ERROR);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
ASSERT(glGetError() == GL_NO_ERROR);
/* Attribute 0 matches `layout(location = 0) in vec3 inPos` in the vertex
 * shader: 3 floats per vertex, tightly packed (stride 0), offset 0. */
glEnableVertexAttribArray(0);
ASSERT(glGetError() == GL_NO_ERROR);
/* Redundant rebind — the buffer is already bound to GL_ARRAY_BUFFER above. */
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
ASSERT(glGetError() == GL_NO_ERROR);
glVertexAttribPointer(
0,
3,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
ASSERT(glGetError() == GL_NO_ERROR);
/* NOTE(review): no glClear(GL_COLOR_BUFFER_BIT) is visible before the draw —
 * if the framebuffer is not cleared elsewhere, stale/undefined contents remain
 * behind the triangle. Not the black-screen cause here, but worth adding. */
glDrawArrays(GL_TRIANGLES, 0, 3);
ASSERT(glGetError() == GL_NO_ERROR);
glDisableVertexAttribArray(0);
ASSERT(glGetError() == GL_NO_ERROR);
glfwPollEvents();
glfwSwapBuffers(glfwGetCurrentContext());
vert shader
#version 330 core
layout(location = 0) in vec3 inPos;
layout(location = 0) out vec4 outPos;
// BUG (the cause of the black screen): this shader writes a custom output
// `outPos` but never writes the built-in gl_Position. The rasterizer takes
// the clip-space position only from gl_Position (GLSL spec 7.1.1, Vertex
// Shader Special Variables), so every vertex position is undefined and
// nothing is drawn.
void main(){
outPos.xyz = inPos;
outPos.w = 1.0;
}
frag shader
#version 330 core

// Fragment shader: writes solid red to color attachment 0 for every fragment.
layout(location = 0) out vec3 color;

void main()
{
    // Constant opaque red; a vec3 output leaves alpha at its default.
    color = vec3(1.0, 0.0, 0.0);
}
Upvotes: 0
Views: 609
Reputation: 210877
You need to write to `gl_Position` in the vertex shader. Unlike the fragment
shader's color (where the deprecated `gl_FragColor` can be replaced by a custom
output variable), `gl_Position` cannot be replaced with a custom output
variable:
#version 330 core
layout(location = 0) in vec3 inPos;

// Corrected vertex shader: the clip-space position must go into the built-in
// gl_Position, which is a vec4 — assign the whole vector.
// (Writing `gl_Position.xyz = vec4(...)` would be a type error: a vec4 cannot
// be assigned to a 3-component swizzle.)
void main()
{
    gl_Position = vec4(inPos, 1.0);
}
See OpenGL Shading Language 4.60 Specification - 7.1.1. Vertex Shader Special Variables
Note: the outputs of the vertex shader are inputs to the next shader stage (in
your case the fragment shader), but the vertex position (`gl_Position`) is
consumed by the subsequent fixed-function steps of the rendering pipeline
(clipping and rasterization) — a custom output never reaches them.
Upvotes: 1