Jarrett

Reputation: 1778

OpenGL VBO data seems to get corrupted

I've uploaded vertices, colors, normals, and texture coordinates into a single VBO, which is associated with a VAO. An EBO holding the indices is associated with the same VAO. I'm using SDL with an OpenGL 3.3 Core Profile context (set up through SDL).
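
For reference, the context is requested roughly like this (a trimmed-down sketch of my SDL2 setup, not the exact init code; the window title and size are placeholders):

// Request a 3.3 core profile context before creating the window.
SDL_Init(SDL_INIT_VIDEO);

SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

SDL_Window* window = SDL_CreateWindow("Model viewer",
    SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
    1024, 768, SDL_WINDOW_OPENGL);
SDL_GLContext context = SDL_GL_CreateContext(window);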

At first, my model seems to render fine. Then, after roughly 8 seconds, it looks like the data gets corrupted.

Here is a video: https://youtu.be/eEiH3EFTPFk

Every frame I pull the data back out of OpenGL (using glGetNamedBufferSubData) and compare it to what it should be, and everything checks out.
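
The check itself is roughly this (simplified; only the vertex block is shown, the variable names are placeholders, and it needs <cstring>/<iostream> for the memcmp and the logging):

// Read the vertex block back from the VBO and compare it to the CPU-side copy.
std::vector<glm::vec3> readBack(expectedVertices.size());
glGetNamedBufferSubData(vao.vbo[0].id, 0, readBack.size() * sizeof(glm::vec3), readBack.data());

if (std::memcmp(readBack.data(), expectedVertices.data(), readBack.size() * sizeof(glm::vec3)) != 0)
{
    std::cerr << "Vertex data in the VBO no longer matches what was uploaded" << std::endl;
}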

Does anyone have any idea what might be happening here? I appreciate any insight you guys might be able to provide.

Here is my code for loading the model data:

struct Vbo
{
    GLuint id;
};

struct Ebo
{
    GLuint id;
    GLenum mode;
    GLsizei count;
    GLenum type;
};

struct Vao
{
    GLuint id;
    Vbo vbo[4];
    Ebo ebo;
};

// ...

MeshId GraphicsEngine::createStaticMesh(
    std::vector<glm::vec3> vertices,
    std::vector<glm::detail::uint32> indices,
    std::vector<glm::vec4> colors,
    std::vector<glm::vec3> normals,
    std::vector<glm::vec2> textureCoordinates
)
{
    Vao vao;

    glGenVertexArrays(1, &vao.id);
    glGenBuffers(1, &vao.vbo[0].id);
    glGenBuffers(1, &vao.ebo.id);

    auto size = vertices.size() * sizeof(glm::vec3);
    size += colors.size() * sizeof(glm::vec4);
    size += normals.size() * sizeof(glm::vec3);
    size += textureCoordinates.size() * sizeof(glm::vec2);

    glBindVertexArray(vao.id);
    glBindBuffer(GL_ARRAY_BUFFER, vao.vbo[0].id);
    glBufferData(GL_ARRAY_BUFFER, size, nullptr, GL_STATIC_DRAW);

    GLintptr offset = 0; // byte offset into the VBO; attribute blocks are packed one after another, not interleaved
    glBufferSubData(GL_ARRAY_BUFFER, offset, vertices.size() * sizeof(glm::vec3), &vertices[0]);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);

    offset += vertices.size() * sizeof(glm::vec3);
    glBufferSubData(GL_ARRAY_BUFFER, offset, colors.size() * sizeof(glm::vec4), &colors[0]);
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (GLvoid*)(offset));
    glEnableVertexAttribArray(1);

    offset += colors.size() * sizeof(glm::vec4);
    glBufferSubData(GL_ARRAY_BUFFER, offset, normals.size() * sizeof(glm::vec3), &normals[0]);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)(offset));
    glEnableVertexAttribArray(2);

    offset += normals.size() * sizeof(glm::vec3);
    glBufferSubData(GL_ARRAY_BUFFER, offset, textureCoordinates.size() * sizeof(glm::vec2), &textureCoordinates[0]);
    glVertexAttribPointer(3, 2, GL_FLOAT, GL_FALSE, 0, (GLvoid*)(offset));
    glEnableVertexAttribArray(3);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vao.ebo.id);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(glm::detail::uint32), &indices[0], GL_STATIC_DRAW); 

    glBindVertexArray(0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    vao.ebo.count = indices.size();
    vao.ebo.mode = GL_TRIANGLES;
    vao.ebo.type = GL_UNSIGNED_INT;

    vertexArrayObjects_.push_back(vao);
    auto index = vertexArrayObjects_.size() - 1;

    return MeshId(index);
}

Here is my code that does the rendering:

// Setup camera
const glm::quat temp = glm::conjugate(camera_.orientation);

view_ = glm::mat4_cast(temp);
view_ = glm::translate(view_, glm::vec3(-camera_.position.x, -camera_.position.y, -camera_.position.z));

glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

glUseProgram(shaderProgram_);

const int modelMatrixLocation = glGetUniformLocation(shaderProgram_, "modelMatrix");
const int pvmMatrixLocation = glGetUniformLocation(shaderProgram_, "pvmMatrix");
const int normalMatrixLocation = glGetUniformLocation(shaderProgram_, "normalMatrix");

glm::detail::uint32 i = 0;  
for ( const auto& r : renderables_ )
{
    const auto& graphicsData = graphicsData_[i];

    glm::mat4 newModel = glm::translate(model_, graphicsData.position);
    newModel = newModel * glm::mat4_cast( graphicsData.orientation );
    newModel = glm::scale(newModel, graphicsData.scale);

    // Send uniform variable values to the shader       
    const glm::mat4 pvmMatrix(projection_ * view_ * newModel);
    glUniformMatrix4fv(pvmMatrixLocation, 1, GL_FALSE, &pvmMatrix[0][0]);

    glm::mat3 normalMatrix = glm::inverse(glm::transpose(glm::mat3(view_ * newModel)));
    glUniformMatrix3fv(normalMatrixLocation, 1, GL_FALSE, &normalMatrix[0][0]);

    glUniformMatrix4fv(modelMatrixLocation, 1, GL_FALSE, &newModel[0][0]);

    glBindTexture(GL_TEXTURE_2D, r.texture.id);
    glBindVertexArray(r.vao.id);
    glDrawElements(r.vao.ebo.mode, r.vao.ebo.count, r.vao.ebo.type, 0);
    glBindVertexArray(0);

    i++;
}

Fragment shader:

#version 330 core

in vec4 ourColor;
in vec2 texCoord;

out vec4 color;

uniform sampler2D ourTexture;

void main()
{
    color = texture(ourTexture, texCoord);
}

Vertex shader:

#version 330 core

uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
uniform mat4 pvmMatrix;
uniform mat3 normalMatrix;

layout (location = 0) in vec3 position;
layout (location = 1) in vec4 color;
layout (location = 2) in vec3 normal;
layout (location = 3) in vec2 textureCoordinate;

out vec4 ourColor;
out vec2 texCoord;

void main()
{
    //gl_Position = vec4(position, 1.0);
    gl_Position = pvmMatrix * vec4(position, 1.0);
    ourColor = color;
    texCoord = textureCoordinate;
}

Upvotes: 2

Views: 580

Answers (1)

Jarrett

Reputation: 1778

As per @MichaelNastenkos' comment above, I added glEnable(GL_DEPTH_TEST); before my rendering code and it seems to fix it. Without depth testing, triangles are drawn in whatever order the indices submit them, so faces at the back can be drawn over faces at the front, which looks a lot like corrupted vertex data even though the buffer contents are fine.
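
For anyone else running into this, the change is just the following, done once during GL initialization (glDepthFunc(GL_LESS) is already the default and is only shown to make the comparison explicit):

// Enable depth testing once, after the context is created.
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS); // already the default; shown only for clarity

// The render loop must also keep clearing the depth buffer every frame,
// which the code in the question was already doing:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

Note that this only helps if the context actually has a depth buffer; with SDL that means requesting one via SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24) before the window is created (though many platforms provide one by default).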

Upvotes: 0
