Reputation: 13
I can't seem to get the triangle to show up.
OS: macOS on an M1 Mac // I also set the core profile window hints, as shown below
Code:
#define GL_SILENCE_DEPRECATION
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <string>
static unsigned int CompileShader(unsigned int type, const std::string& source) {
    unsigned int id = glCreateShader(type);
    const char* scr = source.c_str();
    glShaderSource(id, 1, &scr, nullptr);
    glCompileShader(id);

    int result;
    glGetShaderiv(id, GL_COMPILE_STATUS, &result);
    if (result == GL_FALSE) {
        int length;
        glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
        char* message = (char*) alloca(length * sizeof(char));
        glGetShaderInfoLog(id, length, &length, message);
        std::cout << "\nError: Failed to compile shader! Error list:\n" << message << " Type: " << (type == GL_VERTEX_SHADER ? "Vertex" : "Fragment") << "\n";
        glDeleteShader(id);
        return 0;
    }
    return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader) {
    unsigned int program = glCreateProgram();
    unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
    unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);

    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    glValidateProgram(program);

    glDeleteShader(vs);
    glDeleteShader(fs);
    return program;
}
int main(void)
{
    GLFWwindow* window;

    /* Initialize the library */
    if (!glfwInit())
        return -1;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#ifdef __APPLE__
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // required for core profile contexts on macOS
#endif
    /* Create a windowed mode window and its OpenGL context */
    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    /* Make the window's context current */
    glfwMakeContextCurrent(window);

    // Glew init should be after valid opengl context
    if (glewInit() != GLEW_OK) {
        std::cout << "Error!\n";
    }

    float positions[6] = {
         0.0f,  0.5f,
        -0.5f, -0.5f,
         0.5f, -0.5f
    };

    unsigned int buffer;
    glGenBuffers(1, &buffer);
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);

    std::string vertexShader =
        "#version 330 core\n"
        "\n"
        "layout(location = 0) in vec4 position;\n"
        "\n"
        "void main() {\n"
        " gl_Position = position;\n"
        "}\n";

    std::string fragmentShader =
        "#version 330 core\n"
        "\n"
        "layout(location = 0) out vec4 color;\n"
        "\n"
        "void main() {\n"
        " color = vec4(1.0, 1.0, 0.0, 1.0);\n"
        "}\n";

    unsigned int shader = CreateShader(vertexShader, fragmentShader);
    glUseProgram(shader);

    /* Loop until the user closes the window */
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClear(GL_COLOR_BUFFER_BIT);

        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glfwTerminate();
    return 0;
}
It looks like this: [image of how the window looks]
Upvotes: 0
Views: 129
Reputation: 52162
Vertex array objects (VAOs) aren't optional in Core contexts like they are in Compatibility contexts.
You need to generate and bind a VAO before setting up your vertex attributes and drawing:
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
...
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
...
glDrawArrays( GL_TRIANGLES, 0, 3 );
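For reference, here is a sketch of how that slots into your GLEW-based setup: the VAO lines are the only additions, placed after glewInit() succeeds and before your buffer/attribute code, which is otherwise unchanged:

// after glfwMakeContextCurrent(window) and a successful glewInit()
GLuint vao = 0;
glGenVertexArrays(1, &vao);   // needs the 3.3 core context you already request
glBindVertexArray(vao);       // subsequent vertex attribute state is recorded in this VAO

unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);

glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);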
All together:
#include <glad/glad.h>
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>
#include <vector>
#include <iostream>
#include <cstdlib>
void CheckStatus( GLuint obj, bool isShader )
{
    GLint status = GL_FALSE, log[ 1 << 11 ] = { 0 };
    ( isShader ? glGetShaderiv : glGetProgramiv )( obj, isShader ? GL_COMPILE_STATUS : GL_LINK_STATUS, &status );
    if( status == GL_TRUE ) return;
    ( isShader ? glGetShaderInfoLog : glGetProgramInfoLog )( obj, sizeof( log ), NULL, (GLchar*)log );
    std::cerr << (GLchar*)log << "\n";
    std::exit( EXIT_FAILURE );
}
void AttachShader( GLuint program, GLenum type, const char* src )
{
    GLuint shader = glCreateShader( type );
    glShaderSource( shader, 1, &src, NULL );
    glCompileShader( shader );
    CheckStatus( shader, true );
    glAttachShader( program, shader );
    glDeleteShader( shader );
}
const char* const vert = R"GLSL(
#version 330 core
layout(location = 0) in vec4 position;
void main() {
    gl_Position = position;
}
)GLSL";

const char* const frag = R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main() {
    color = vec4(1.0, 1.0, 0.0, 1.0);
}
)GLSL";
int main( int, char** )
{
    glfwSetErrorCallback( []( int, const char* desc )
    {
        std::cerr << desc << "\n";
        std::exit( EXIT_FAILURE );
    } );

    glfwInit();
    glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
    glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
    glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE );
    glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
    GLFWwindow* window = glfwCreateWindow( 640, 480, "GLFW", NULL, NULL );
    glfwMakeContextCurrent( window );
    gladLoadGLLoader( (GLADloadproc)glfwGetProcAddress );

    float positions[6] =
    {
         0.0f,  0.5f,
        -0.5f, -0.5f,
         0.5f, -0.5f,
    };

    unsigned int buffer;
    glGenBuffers(1, &buffer);
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);

    GLuint vao = 0;
    glGenVertexArrays( 1, &vao );
    glBindVertexArray( vao );
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);

    GLuint prog = glCreateProgram();
    AttachShader( prog, GL_VERTEX_SHADER, vert );
    AttachShader( prog, GL_FRAGMENT_SHADER, frag );
    glLinkProgram( prog );
    CheckStatus( prog, false );
    glUseProgram( prog );

    while( !glfwWindowShouldClose( window ) )
    {
        glfwPollEvents();

        int w, h;
        glfwGetFramebufferSize( window, &w, &h );
        glViewport( 0, 0, w, h );

        glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );

        glDrawArrays( GL_TRIANGLES, 0, 3 );

        glfwSwapBuffers( window );
    }

    glfwTerminate();
}
Upvotes: 2