sariug

Reputation: 106

Simple line rendering fails while trying to render pixel coordinates

I am sorry to bring this here, but I have spent around seven hours on what is probably a really simple thing. Maybe some of you can identify the problem.

I am trying to render some pixel coordinates to the screen. The code is below.

The screen is 800x600. I simply calculate the positions of the lines and then try to render them to the screen.

For example: point A(400, 300, 0) and point B(500, 300, 0) should produce a simple black line from the center of the screen to the right.

Since I call this class function from the render loop, I thought I might be creating a separate rendering session. However, when I add something such as glClearColor there, the background does change, so the calls do seem to reach the context.
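For context, the class is used from the render loop roughly like this (simplified; window is the GLFWwindow* and axis is the CoordinateAxis instance, both created during start-up):

while (!glfwWindowShouldClose(window))
{
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);   // changing this does affect the background
    glClear(GL_COLOR_BUFFER_BIT);

    axis.coordinate_axis();                  // the call that should draw the lines

    glfwSwapBuffers(window);
    glfwPollEvents();
}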


#include <GL/glew.h>    // GLEW has to be included before any other OpenGL header
#include <GLFW/glfw3.h>
#include <GL/gl.h>

#include <iostream>
#include <vector>

Vertex shader:

const GLchar *vertex_shader =
    "#version 410\n"
    "layout (location = 0) in vec3 pos;\n"
    "layout (location = 1) in vec4 col;\n"
    "uniform mat4 projection;\n"
    "out vec4 Frag_Color;\n"
    "void main()\n"
    "{\n"
    "    Frag_Color = col;\n"
    "    gl_Position =projection*vec4(pos.xyz,1);\n"
    "}\n";

Fragment shader:

const GLchar *fragment_shader =
    "#version 410\n"
    "in vec4 Frag_Color;\n"
    "layout (location = 0) out vec4 Out_Color;\n"
    "void main()\n"
    "{\n"
    "    Out_Color = Frag_Color;\n"
    "}\n";

Vertex structure:

struct Vrtx
{
    float pos[3];
    float col[4] = {0.0f, 0.0f, 0.0f, 1.0f};
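    // pos starts at byte offset 0 and col at 3 * sizeof(float) = 12,
    // so one interleaved vertex occupies sizeof(Vrtx) = 28 bytes.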
};

Coordinate axis class:

class CoordinateAxis
{
public:
    void coordinate_axis()
    {
        vertices.resize(6);
        for (int i = 0; i < 3; i++)
        {
            vertices[2 * i].pos[0] = 400;
            vertices[2 * i].pos[1] = 300;
            vertices[2 * i].pos[2] = 0;
        }
        vertices[1].pos[0] = 500;
        vertices[1].pos[1] = 300;
        vertices[1].pos[2] = 0;
        vertices[3].pos[0] = 400;   // end point of the second axis line
        vertices[3].pos[1] = 400;
        vertices[3].pos[2] = 0;
        vertices[5].pos[0] = 400;   // end point of the third axis line
        vertices[5].pos[1] = 430;
        vertices[5].pos[2] = 100;

        setupRender();
        glBindVertexArray(VAO);
        glDrawElements(GL_LINE, 6, GL_UNSIGNED_INT, 0);
        glBindVertexArray(0);
        glUseProgram(0);
    }
    CoordinateAxis()
    {
        initShaderProgram();
    };

private:
    void initShaderProgram()
    {
        // Vertex shader
        GLuint vHandle = glCreateShader(GL_VERTEX_SHADER);
        glShaderSource(vHandle, 1, &vertex_shader, NULL);
        glCompileShader(vHandle);

        // Fragment shader
        GLuint fHandle = glCreateShader(GL_FRAGMENT_SHADER);
        glShaderSource(fHandle, 1, &fragment_shader, NULL);
        glCompileShader(fHandle);

        // Create Program
        handleProgram = glCreateProgram();
        glAttachShader(handleProgram, vHandle);
        glAttachShader(handleProgram, fHandle);
        glLinkProgram(handleProgram);

        attribLocationProj = glGetUniformLocation(handleProgram, "projection");

        glGenVertexArrays(1, &VAO);
        // CreateBuffers
        glGenBuffers(1, &vboHandle);
        glGenBuffers(1, &iboHandle);
    }
    void setupRender()
    {
        GLint last_viewport[4];
        glGetIntegerv(GL_VIEWPORT, last_viewport);
        float L = last_viewport[0];
        float R = L + last_viewport[2];
        float B = last_viewport[1];
        float T = B + last_viewport[3];

        const float ortho_projection[4][4] =
            {
                {2.0f / (R - L), 0.0f, 0.0f, 0.0f},
                {0.0f, 2.0f / (T - B), 0.0f, 0.0f},
                {0.0f, 0.0f, -1.0f, 0.0f},
                {(R + L) / (L - R), (T + B) / (B - T), 0.0f, 1.0f},
            };
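        // Assuming the viewport is (0, 0, 800, 600): x_ndc = 2 * x / 800 - 1 and
        // y_ndc = 2 * y / 600 - 1, so pixel (400, 300) maps to NDC (0, 0) (the
        // screen centre) and (500, 300) maps to (0.25, 0).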
        glUseProgram(handleProgram);
        glUniformMatrix4fv(attribLocationProj, 1, GL_FALSE, &ortho_projection[0][0]);
        glBindVertexArray(VAO);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, iboHandle);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLuint), indices.data(), GL_STATIC_DRAW);

        glBindBuffer(GL_ARRAY_BUFFER, vboHandle);
        glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vrtx), vertices.data(), GL_STATIC_DRAW);

        glBindBuffer(GL_ARRAY_BUFFER, vboHandle);
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, vertices.size() * sizeof(Vrtx), 0);
        glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, vertices.size() * sizeof(Vrtx), (void *)(3 * sizeof(float)));

        glEnableVertexAttribArray(0);
        glEnableVertexAttribArray(1);
    }

    std::vector<Vrtx> vertices;
    std::vector<GLuint> indices;
    GLuint handleProgram, VAO, vboHandle, iboHandle, attribLocationProj;
};

Upvotes: 1

Views: 80

Answers (1)

Rabbid76

Reputation: 211166

GL_LINE is not a valid primitive type; it is a mode for glPolygonMode.
The valid line primitive type is GL_LINES, so change

glDrawElements(GL_LINE, 6, GL_UNSIGNED_INT, 0);

to

glDrawElements(GL_LINES, 6, GL_UNSIGNED_INT, 0);

Furthermore, there is an issue in how the array of vertex attribute data is set up with glVertexAttribPointer. The 5th parameter (stride) is the byte offset between consecutive attribute tuples, not the size of the buffer. It has to be sizeof(Vrtx) rather than vertices.size() * sizeof(Vrtx):

glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vrtx), 0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, sizeof(Vrtx), (void *)(3*sizeof(float)));
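Optionally, the stride and offsets can be taken directly from the Vrtx struct with the standard offsetof macro, so the attribute setup stays in sync with the vertex layout; a minimal sketch:

#include <cstddef>   // offsetof

glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vrtx), (void *)offsetof(Vrtx, pos));
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, sizeof(Vrtx), (void *)offsetof(Vrtx, col));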

Note that the array of indices seems to be empty. Either initialize it:

indices = { 0, 1, 2, 3, 4, 5 };

or use glDrawArrays instead:

glDrawArrays(GL_LINES, 0, 6);
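Putting the corrections together, the drawing part of coordinate_axis could look roughly like this (a sketch using the glDrawArrays variant, so no index buffer is required):

void coordinate_axis()
{
    vertices.resize(6);
    // ... fill the six line endpoints as before ...

    setupRender();                    // uploads the vertices, attributes use stride sizeof(Vrtx)
    glBindVertexArray(VAO);
    glDrawArrays(GL_LINES, 0, 6);     // three independent lines from six vertices
    glBindVertexArray(0);
    glUseProgram(0);
}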

Upvotes: 1
