Reputation: 475
I'm trying to figure out the right way to render a single texture in OpenGL, I've made some progress but I'm only getting a black square. I'm just not sure if there's something I'm not doing or if I'm not doing things in the correct order or maybe I've just made a mistake somewhere. All of the articles on the internet seem to say completely different things and the code is always split into tiny samples so I can never figure out what should go in the main loop and what shouldn't.
First off, here's what I have so far. I load the image using FreeImage:
FIBITMAP *load_image(image_data *image)
{
image->texture_id = 0;
FreeImage_Initialise(FALSE);
FIBITMAP *bitmap = NULL;
FREE_IMAGE_FORMAT format = FreeImage_GetFIFFromFilename(image->path);
if (!(bitmap = FreeImage_Load(
format,
image->path,
PNG_DEFAULT)))
exit(EXIT_FAILURE);
glGenTextures(1, &(image->texture_id));
glBindTexture(GL_TEXTURE_2D, image->texture_id);
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_BGR,
FreeImage_GetWidth(bitmap),
FreeImage_GetHeight(bitmap),
0,
GL_BGR,
GL_UNSIGNED_INT,
bitmap);
FreeImage_Unload(bitmap);
return bitmap;
}
And then here I have my main program loop:
void main_loop(
image_data *image,
shader_info *vertex_shader,
shader_info *fragment_shader)
{
/* Create variables to house vertex-array/vertex-buffer object names. */
GLuint vArray, vBuffer, program, uniform_mytexture;
/* NDC (x, y) pair for each vertex. */
GLfloat vertices[4][2] = {
{-0.90, -0.90},
{0.90, -0.90},
{-0.90, 0.90},
{0.90, 0.90}};
printf("%s\n", glGetString(GL_VERSION));
/*
* Allocates OpenGL memory, and binds vArray and vBuffer
* to that memory for use as VAO and VBO names, repectively.
*/
program = initialize_image(
&vArray,
&vBuffer,
vertices,
sizeof(vertices),
vertex_shader,
fragment_shader);
/* Main display loop */
while (!glfwWindowShouldClose(window.glfw_window))
{
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.0, 0.5, 0.0, 1.0);
glViewport(0, 0, window.width, window.height);
glUseProgram(program);
glBindVertexArray(vArray);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, image->texture_id);
uniform_mytexture = glGetUniformLocation(program, "mytexture");
glUniform1i(uniform_mytexture, GL_TEXTURE0);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glfwSwapBuffers(window.glfw_window);
glfwPollEvents();
glFlush();
}
return;
}
As you can see I do most of the glGen* and glBind* stuff in initialize_image(), which looks like this:
int initialize_image(
GLuint*vArray,
GLuint *vBuffer,
GLfloat vertices[][2],
int size,
shader_info *vertex_shader,
shader_info *fragment_shader)
{
GLuint vShader, fShader, program, link_ok;
GLint attribute_vpos, attribute_texcoord;
const char *attribute_name_vpos = "vertex_position", *attribute_name_texcoord = "texcoord";
glGenVertexArrays(1, vArray);
glBindVertexArray(*vArray);
glGenBuffers(1, vBuffer);
glBindBuffer(GL_ARRAY_BUFFER, *vBuffer);
glBufferData(
GL_ARRAY_BUFFER,
size,
vertices,
GL_STATIC_DRAW);
vertex_shader->content = load_shader(vertex_shader->path);
fragment_shader->content = load_shader(fragment_shader->path);
vShader = compile_shader(GL_VERTEX_SHADER, vertex_shader->content);
fShader = compile_shader(GL_FRAGMENT_SHADER, fragment_shader->content);
program = glCreateProgram();
glAttachShader(program, vShader);
glAttachShader(program, fShader);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &link_ok);
if (!link_ok)
{
fprintf(stderr, "Shader linkage failed.\n");
exit(EXIT_FAILURE);
}
attribute_vpos = glGetAttribLocation(program, attribute_name_vpos);
if (attribute_vpos == -1)
{
fprintf(stderr, "Attribute binding failed.\n");
exit(EXIT_FAILURE);
}
glVertexAttribPointer(
attribute_vpos,
2,
GL_FLOAT,
GL_FALSE,
0,
BUFFER_OFFSET(0));
glEnableVertexAttribArray(attribute_vpos);
attribute_texcoord = glGetAttribLocation(program, attribute_name_texcoord);
if (attribute_texcoord == -1)
{
fprintf(stderr, "Attribute binding failed.\n");
exit(EXIT_FAILURE);
}
glEnableVertexAttribArray(attribute_texcoord);
GLuint texture_vbo;
GLfloat texture_coords[4][2] = {
{-1.0, -1.0},
{1.0, -1.0},
{-1.0, 1.0},
{1.0, 1.0}};
glGenBuffers(1, &texture_vbo);
glBindBuffer(GL_ARRAY_BUFFER, texture_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(texture_coords), texture_coords, GL_STATIC_DRAW);
glEnableVertexAttribArray(attribute_texcoord);
glBindBuffer(GL_ARRAY_BUFFER, texture_vbo);
glVertexAttribPointer(
attribute_texcoord,
2,
GL_FLOAT,
GL_FALSE,
0,
0);
return program;
}
And here is my vertex shader:
#version 330 core
layout(location = 0) in vec4 vertex_position;
attribute vec2 texcoord;
varying vec2 f_texcoord;
void main(void)
{
gl_Position = vertex_position;
f_texcoord = texcoord;
return;
}
And the fragment shader:
#version 330 core
out vec4 fragment_color;
varying vec2 f_texcoord;
uniform sampler2D mytexture;
void main(void)
{
fragment_color = texture2D(mytexture, f_texcoord);
return;
}
I'm sorry to dump so much code, I can't really pinpoint where I went wrong so I didn't want to leave stuff out. If anybody can point me in the right direction I'd be really grateful.
I've been trying to figure out how to do this for days. I feel so incompetent for spending so much time on one of the most basic features of OpenGL.
Upvotes: 2
Views: 696
Reputation: 54602
There are a number of issues in this code:
As far as I could find, GL_BGR
is not valid as an internal texture format. Also, specifying GL_UNSIGNED_INT
as the type for glTexImage2D()
would mean that each component is an unsigned int, while your texture loading routine most likely provides unsigned bytes for the components. And bitmap
is a pointer to a FIBITMAP
object, while the last argument needs to be a pointer to the actual image data. So the call should be:
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RGB8,
FreeImage_GetWidth(bitmap),
FreeImage_GetHeight(bitmap),
0,
GL_BGR,
GL_UNSIGNED_BYTE,
FreeImage_GetBits(bitmap));
The value for the texture sampler uniform variable is wrong:
glUniform1i(uniform_mytexture, GL_TEXTURE0);
This needs to be the index of the texture unit, not the corresponding enum value. Replace the call by:
glUniform1i(uniform_mytexture, 0);
The shader code uses some storage qualifiers that are not available in the core profile. attribute
and varying
are not valid in the core profile. attribute
is replaced by in
, and varying
by out
in the vertex shader and in
in the fragment shader. So the declarations in the vertex shader should be:
layout(location = 0) in vec4 vertex_position;
layout(location = 1) in vec2 texcoord;
out vec2 f_texcoord;
and in the fragment shader:
out vec4 fragment_color;
in vec2 f_texcoord;
uniform sampler2D mytexture;
Upvotes: 3
Reputation: 43329
I am pretty sure if you are loading a PNG, you do not want GL_UNSIGNED_INT
for the pixel data type (that would imply a 96-bit RGB image). Instead, you probably want GL_UNSIGNED_BYTE
(24-bit).
glTexImage2D
to this:glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RGB,
FreeImage_GetWidth (bitmap),
FreeImage_GetHeight (bitmap),
0,
GL_BGR,
GL_UNSIGNED_BYTE, // Assumption: FreeImage_GetBPP (bitmap) == 24
FreeImage_GetBits (bitmap));
Often times when reading a 24-bit RGB image you have to change GL's unpack alignment from 4-byte to 1-byte, but FreeImage guarantees some desired behavior I will illustrate below (in case you encountered any examples that called glPixelStorei (GL_UNPACK_ALIGNMENT, 1)
-- they do not apply here).
In FreeImage each scanline starts at a 32-bit boundary for performance reasons.
I suggest you browse through the rest of that API reference while you are at it. You can use many of those functions to eliminate things like the 24-bit image assumption I made above.
Upvotes: 2
Reputation: 664
Well, for starters, your image loading routine is completely wrong.
FIBITMAP *load_image(image_data *image)
{
image->texture_id = 0;
FreeImage_Initialise(FALSE);
FIBITMAP *bitmap = NULL;
FREE_IMAGE_FORMAT format = FreeImage_GetFIFFromFilename(image->path);
if (!(bitmap = FreeImage_Load(
format,
image->path,
PNG_DEFAULT)))
exit(EXIT_FAILURE);
glGenTextures(1, &(image->texture_id));
glBindTexture(GL_TEXTURE_2D, image->texture_id);
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_BGR,
FreeImage_GetWidth(bitmap),
FreeImage_GetHeight(bitmap),
0,
GL_BGR,
GL_UNSIGNED_INT, //I think FreeImage only loads bytes...
bitmap); //...Hello, undefined behaviour...
FreeImage_Unload(bitmap);
return bitmap; //WHAT?
}
Also, FreeImage_Initialise and FreeImage_Deinitialise are meant to be called only once per program.
Upvotes: 2