Reputation: 1088
I am running a simple program that should draw a red triangle, but it only shows a black window. I suspect the shader is the problem, because if I remove the shader it draws a white triangle just fine.
Here's my code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
static unsigned int CompileShader(unsigned int type, const std::string& source){
    unsigned int id = glCreateShader(type);
    const char* src = source.c_str();
    glShaderSource(id, 1, &src, nullptr);
    glCompileShader(id);

    int res;
    glGetShaderiv(id, GL_COMPILE_STATUS, &res);
    if(res == GL_FALSE){
        int length;
        glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
        char* message = (char*)alloca(length * sizeof(char));
        glGetShaderInfoLog(id, length, &length, message);
        std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << " shader!" << std::endl;
        std::cout << message << std::endl;
        glDeleteShader(id);
        return 0;
    }
    return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader){
    unsigned int program = glCreateProgram();
    unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
    unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);

    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    glValidateProgram(program);

    glDeleteShader(vs);
    glDeleteShader(fs);
    return program;
}
int main(void)
{
    GLFWwindow* window;

    /* Initialize the library */
    if (!glfwInit())
        return -1;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 1);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    /* Create a windowed mode window and its OpenGL context */
    window = glfwCreateWindow(640, 480, "I Rock", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    /* Make the window's context current */
    glfwMakeContextCurrent(window);

    if (glewInit() != GLEW_OK)
        std::cout << "Error!" << std::endl;

    float positions[6] = {
        -0.5f, -0.5f,
         0.0f,  0.5f,
         0.5f, -0.5f
    };

    unsigned int buffer;
    glGenBuffers(1, &buffer);
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);

#define GLSL(version, shader) "#version " #version "\n" #shader

    const char* vertexShader = GLSL
    (
        410 core,
        layout( location = 0 ) in vec4 position;
        void main()
        {
            gl_Position = position;
        }
    );

    const char* fragmentShader = GLSL
    (
        410 core,
        out vec4 color;
        void main()
        {
            color = vec4( 1.0, 0.0, 0.0, 1.0 );
        }
    );

    unsigned int shader = CreateShader(vertexShader, fragmentShader);
    glUseProgram(shader);

    /* Loop until the user closes the window */
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClear(GL_COLOR_BUFFER_BIT);

        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glDeleteProgram(shader);
    glfwTerminate();
    return 0;
}
These are my specifications:
Upvotes: 1
Views: 470
Reputation: 52084
You need to generate & bind a Vertex Array Object (VAO) before enabling vertex attributes, setting vertex attrib pointers, and drawing:
...
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6*sizeof(float), positions, GL_STATIC_DRAW);
// generate & bind VAO
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), 0);
...
VAOs aren't optional in Core profile contexts the way they are in Compatibility profile contexts: with no VAO bound, the attribute setup and draw calls fail with GL_INVALID_OPERATION, which is why you end up with nothing but the clear color.
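If you want to confirm that this is what's happening, a minimal debugging sketch (not part of the original code; `CheckGLError` is just an illustrative helper name) is to drain the GL error queue after the suspect calls. In a Core profile context with no VAO bound, at least the attribute-pointer call should report GL_INVALID_OPERATION (0x0502):

// Illustrative helper: drains the GL error queue and labels where the
// errors were observed. Requires <iostream>, already included above.
static void CheckGLError(const char* where)
{
    for (GLenum err; (err = glGetError()) != GL_NO_ERROR; )
        std::cout << "GL error 0x" << std::hex << err << std::dec
                  << " after " << where << std::endl;
}

// Usage sketch around the calls in question:
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
CheckGLError("glVertexAttribPointer");
glDrawArrays(GL_TRIANGLES, 0, 3);
CheckGLError("glDrawArrays");

Once the VAO is generated and bound as shown above, those checks should come back clean and the red triangle should appear.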
Upvotes: 1