Reputation: 4316
In trying to move into using "modern" OpenGL (basically 3.2+), I've run into some trouble running basic code (derived from both here and here) using GLFW, GLEW, and OpenGL.
My first problem is that with the code below:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>

#include <stdlib.h>
#include <stdio.h>

const GLchar* vertexSource =
    "#version 150 core\n"
    "in vec2 position;"
    "void main()"
    "{"
    "    gl_Position = vec4(position, 0.0, 1.0);"
    "}";
const GLchar* fragmentSource =
    "#version 150 core\n"
    "out vec4 outColor;"
    "void main()"
    "{"
    "    outColor = vec4(1.0, 1.0, 1.0, 1.0);"
    "}";

void checkErr(const char* msg) {
    GLenum err = glGetError();
    if (err != 0) {
        printf("@ \"%s\": %d\n", msg, err);
        exit(EXIT_FAILURE);
    } else {
        printf("@ \"%s\": successful\n", msg);
    }
}

int main(int argc, char* argv[]) {
    GLFWwindow* window;

    // Initialize GLFW
    if (!glfwInit())
        return -1;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    // Create a windowed mode window and its OpenGL context
    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    // Make the window's context current
    glfwMakeContextCurrent(window);

    // Initialize GLEW
    glewExperimental = GL_TRUE;
    glewInit();

    // get version info
    const GLubyte* renderer = glGetString(GL_RENDERER);
    const GLubyte* version = glGetString(GL_VERSION);
    const GLubyte* glslVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
    printf("Renderer: %s\n", renderer);
    printf("OpenGL version: %s\n", version);
    printf("GLSL version: %s\n", glslVersion);

    // Create Vertex Array Object
    GLuint vao;
    glGenVertexArrays(1, &vao);
    checkErr("Gen VAO");
    glBindVertexArray(vao);
    checkErr("Bind VAO");

    // Create a Vertex Buffer Object and copy the vertex data to it
    GLuint vbo;
    glGenBuffers(1, &vbo);
    checkErr("Gen VBO");

    GLfloat vertices[] = {
         0.0f,  0.5f,
         0.5f, -0.5f,
        -0.5f, -0.5f
    };

    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    checkErr("Bind VBO");
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    checkErr("VBO data");

    // Create and compile the vertex shader
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexSource, NULL);
    glCompileShader(vertexShader);
    checkErr("Compile vert shader");

    // Create and compile the fragment shader
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
    glCompileShader(fragmentShader);
    checkErr("Compile frag shader");

    // Link the vertex and fragment shader into a shader program
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glBindFragDataLocation(shaderProgram, 0, "outColor");
    glLinkProgram(shaderProgram);
    checkErr("Link program");
    glUseProgram(shaderProgram);
    checkErr("Use program");

    // Specify the layout of the vertex data
    GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
    glEnableVertexAttribArray(posAttrib);
    checkErr("Enable vertex attrib");
    glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
    checkErr("Describe vert data");

    // Loop until the user closes the window
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClearColor(0.0, 0.0, 0.0, 1.0);
        glClear(GL_COLOR_BUFFER_BIT);
        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glfwTerminate();
    exit(EXIT_SUCCESS);
}
I'm immediately running into GL_INVALID_OPERATION errors on the very first step of setting up the vertex array object. I've done a fair bit of research regarding OS X's flimsy OpenGL support, but so far most of the things I've modified in this code have done nothing more than produce a completely black screen (that is, once I removed the crashing behavior of the checkErr helper function).
For reference, I'm running on an early 2015 MacBook Pro with Retina display, OS X v10.11.3, and the version info printed by the program above is as follows:
Renderer: Intel(R) Iris(TM) Graphics 6100
OpenGL version: 4.1 INTEL-10.12.13
GLSL version: 4.10
Any help is greatly appreciated!
Upvotes: 1
Views: 1811
Reputation: 45362
You just assumed that your error was generated by glGenVertexArrays. But that is not the case. It is generated by glewInit. And this is because GLEW is just broken on core profile OpenGL: it uses glGetString(GL_EXTENSIONS) to query the extension string, which is not available in core profiles and generates a GL_INVALID_ENUM error (1280).
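For context, a core profile requires the extension list to be queried one entry at a time via glGetStringi. The following is a minimal sketch (assuming a current 3.2+ context) of what a core-profile-aware loader has to do instead of calling glGetString(GL_EXTENSIONS):

// Core-profile extension enumeration: query GL_NUM_EXTENSIONS,
// then fetch each extension name individually.
GLint numExtensions = 0;
glGetIntegerv(GL_NUM_EXTENSIONS, &numExtensions);
for (GLint i = 0; i < numExtensions; i++) {
    const GLubyte* ext = glGetStringi(GL_EXTENSIONS, (GLuint) i);
    printf("%s\n", ext);
}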
Normally, glewInit will then abort with return code GL_FALSE. However, the "workaround" of setting glewExperimental=GL_TRUE makes it carry on, ignoring the error and querying all the extension pointers anyway. This is now broken in at least 3 different regards:

1. Since the extension string was never successfully queried, GLEW will wrongly report extensions as unavailable.
2. The GL_INVALID_ENUM error stays in the GL error queue, which is why your own very first glGetError call (at "Gen VAO") reports an error.
3. Extension function pointers may be NULL, but calling these would be undefined behavior. Together with 1, this means you have no way of checking the availability of any extension, except by manually doing the stuff GLEW is actually there to do for you.

As a quick & dirty hack, you can just add a glGetError() right after glewInit, to read the error away. After I did that, your code produced the expected white triangle on my implementation (NVIDIA/Linux).
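Applied to the code from the question, the hack is just one extra line:

// Initialize GLEW
glewExperimental = GL_TRUE;
glewInit();
glGetError(); // read away the GL_INVALID_ENUM left behind by glewInit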
A better fix is probably to switch over to another GL loader which properly works with core profiles, for example glad. Switching over should not be hard, as only the init function has to be replaced. Note that glad is not a loader library, but a Python script which generates a loader source file for your needs, so you don't need to link another library; you just add another source file to your project.
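As a rough sketch of what that replacement looks like (assuming a glad 1.x C loader generated for your GL version; the exact header and function names depend on the generator options):

#include <glad/glad.h>   // generated by glad, replaces <GL/glew.h>
#include <GLFW/glfw3.h>

/* ... create the window as before ... */
glfwMakeContextCurrent(window);

/* replaces glewExperimental = GL_TRUE; glewInit(); */
if (!gladLoadGLLoader((GLADloadproc) glfwGetProcAddress)) {
    fprintf(stderr, "failed to load OpenGL function pointers\n");
    return -1;
}

Since glad resolves function pointers through the callback you hand it (here GLFW's glfwGetProcAddress), it never needs to parse the extension string the way GLEW does, which is what sidesteps the core-profile problem.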
Upvotes: 3