Frank
Frank

Reputation: 29

Unable to display texture in OpenGL

So I am having some trouble with applying a texture to my mesh in OpenGL. I have looked high and low for a solution but have not found a solution in many hours of research.

I am hoping another pair of eyes might be able to catch the error in my code.

Everything works great when I set a color in my shader, but if I try to pass it a texture, I either get

The code below draws a simple square and (should) apply a texture to it.

Image Loader:

void Texture::loadTexture(const char* _filename)
{
    char fn[255] = "assets/textures/";
    strcat(fn, _filename);

    std::cout << fn << std::endl;

    FREE_IMAGE_FORMAT format = FreeImage_GetFileType(fn, 0);
    FIBITMAP* bitmap = FreeImage_Load(format, fn);
    FIBITMAP* bitmap32 = nullptr;

    unsigned int bitsPerPixel =  FreeImage_GetBPP(bitmap);
    std::cout << "Image BPP: " << bitsPerPixel << std::endl;

    int imageWidth  = FreeImage_GetWidth(bitmap);
    int imageHeight = FreeImage_GetHeight(bitmap);
    std::cout << "Image size: " << imageWidth << " x " << imageHeight << std::endl;

    if (bitsPerPixel != 32)
        bitmap32 = FreeImage_ConvertTo32Bits(bitmap);
    else
        bitmap32 = bitmap;

    BYTE* textureData = FreeImage_GetBits(bitmap32);

    //for(int i = 0; i < imageWidth * imageHeight * 4; i += 4) {
    //   std::cout << (int)textureData[i] << ", " << (int)textureData[i+1] << ", " << (int)textureData[i+2] << ", " << (int)textureData[i+3] << std::endl;
    //}

    glBindTexture(GL_TEXTURE_2D, m_ID);

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, imageWidth, imageHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, (void*)textureData);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    FreeImage_Unload(bitmap);

    if (bitsPerPixel != 32)
        FreeImage_Unload(bitmap32);

}

I have confirmed that the image file loads and that the data is correct by outputting the image data in a readable format. The results are as expected.

My main loop:

int main()
{

const std::vector<sun::Vertex> vertices = {
        sun::Vertex(sun::Vector3f(-4.0f, 4.0f, 0.0f), sun::Vector2f(0.0f, 1.0f)),
        sun::Vertex(sun::Vector3f(4.0f, 4.0f, 0.0f), sun::Vector2f(1.0f, 1.0f)),
        sun::Vertex(sun::Vector3f(-4.0f, -4.0f, 0.0f), sun::Vector2f(0.0f, 0.0f)),
        sun::Vertex(sun::Vector3f(4.0f, -4.0f, 0.0f), sun::Vector2f(1.0f, 0.0f))
};

const std::vector<unsigned int> indices = {0, 1, 2, 1, 3, 2};

sun::Window window("My Game", 1280, 800);
sun::Shader shader("basic.vs", "basic.fs");
sun::Mesh mesh;
sun::Transform transform;
sun::Texture texture;

transform.setProjection(70.0f, window.getWidth(), window.getHeight(), 1.0f, -1000.0f);

mesh.addVertices(vertices, indices);
texture.loadTexture("test2.png");

shader.addUniform("transform");
shader.bind();

glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

// Start the main loop
while (!window.closeWindow()) {

    // Render
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glClearColor(0.3f, 0.8f, 1.0f, 1.0f);

    transform.setTranslation(0.0f, 0.0f, 10.0f);

    shader.setUniform("transform", transform.getProjectedTransformation());

    texture.bind(); // { glBindTexture(GL_TEXTURE_2D, m_ID); }
    glEnable(GL_TEXTURE_2D);

    mesh.draw(); // See below for details.

    glDisable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, 0);

    window.update();

}

return 1;
}

The mesh.draw() method:

void Mesh::draw()
{
    //std::cout << m_Index << std::endl;
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);

    glBindBuffer(GL_ARRAY_BUFFER, m_VBO);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), 0);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const void*)sizeof(Vector3f));

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_IBO);
    glDrawElements(GL_TRIANGLES, m_SizeIndices, GL_UNSIGNED_INT, 0);

    glDisableVertexAttribArray(1);
    glDisableVertexAttribArray(0);
}

And at last, my Vertex and Fragment shaders respectively:

#version 330 core

layout(location = 0) in vec3 position;
layout(location = 1) in vec2 texCoord;

uniform mat4 transform;

out vec2 coords;

void main()
{
    gl_Position = transform * vec4(position, 1.0f);
    coords = texCoord;
}

#version 330 core

in vec2 coord;

uniform sampler2D sampler;

void main()
{
    gl_FragColor = texture2D(sampler, coord.xy);//vec4(1.0f, 0.0f, 0.0f, 1.0f);
}

As added information, the following OpenGL parameters are set/enabled during initialisation:

void Window::init()
{
    // Set viewport size
    glfwGetFramebufferSize(m_Window, &m_Width, &m_Height);
    glViewport(0, 0, m_Width, m_Height);

    // Set vsync
    glfwSwapInterval(0);

    // Set face culling and depth test.
    glFrontFace(GL_CW);
    glCullFace(GL_BACK);
    glEnable(GL_CULL_FACE);
    glEnable(GL_DEPTH_TEST);

    // Enable 2D textures
    glEnable(GL_TEXTURE_2D);

    // Enable gamma correction
    glEnable(GL_FRAMEBUFFER_SRGB);
}

Any help would be greatly appreciated.

Thank you!

Upvotes: 1

Views: 210

Answers (1)

Frank
Frank

Reputation: 29

Annnnd I found my issue. Goes to show that staring at your own code for too long makes you miss the most obvious things.

The problem was in my shader. The Vertex Shader was outputting

vec2 coords

But the fragment shader was waiting for

vec2 coord

Once I changed the variable names to match, I got my texture.

Upvotes: 1

Related Questions