Chris Camacho
Chris Camacho

Reputation: 1174

GLSL getting odd values back from my uniform and it seems to be set with the wrong value too

I'm having problems using a uniform in a vertex shader

heres the code

// gcc main.c -o main `pkg-config --libs --cflags glfw3` -lGL -lm


#include <GLFW/glfw3.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

void gluErrorString(const char* why,GLenum errorCode);
void checkShader(GLuint status, GLuint shader, const char* which);


float verts[] = {
    -0.5f,   0.5f,   0.0f,
     0.5f,   0.5f,   0.0f,
     0.5f,  -0.5f,   0.0f,
    -0.5f,  -0.5f,   0.0f
};

const char* vertex_shader =
    "#version 330\n"
    "in vec3 vp;\n"
    "uniform float u_time;\n"
    "\n"
    "void main () {\n"
    "  vec4 p = vec4(vp, 1.0);\n"
    "  p.x = p.x + u_time;\n"
    "  gl_Position = p;\n"
    "}";

const char* fragment_shader =
    "#version 330\n"
    "out vec4 frag_colour;\n"
    "void main () {\n"
    "  frag_colour = vec4 (0.5, 0.0, 0.5, 1.0);\n"
    "}";




int main () {

    if (!glfwInit ()) {
        fprintf (stderr, "ERROR: could not start GLFW3\n");
        return 1;
    }

    glfwWindowHint (GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint (GLFW_CONTEXT_VERSION_MINOR, 2);
    //glfwWindowHint (GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint (GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow (640, 480, "Hello Triangle", NULL, NULL);
    if (!window) {
        fprintf (stderr, "ERROR: could not open window with GLFW3\n");
        glfwTerminate();
        return 1;
    }

    glfwMakeContextCurrent (window);

    // vert arrays group vert buffers together unlike GLES2 (no vert arrays)
    // we *must* have one of these even if we only need 1 vert buffer
    GLuint vao = 0;
    glGenVertexArrays (1, &vao);
    glBindVertexArray (vao);

    GLuint vbo = 0;
    glGenBuffers (1, &vbo);
    glBindBuffer (GL_ARRAY_BUFFER, vbo);
    // each vert takes 3 float * 4 verts in the fan = 12 floats
    glBufferData (GL_ARRAY_BUFFER, 12 * sizeof (float), verts, GL_STATIC_DRAW);
    gluErrorString("buffer data",glGetError());

    glEnableVertexAttribArray (0);
    glBindBuffer (GL_ARRAY_BUFFER, vbo);
    // 3 components per vert
    glVertexAttribPointer (0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
    gluErrorString("attrib pointer",glGetError());


    GLuint vs = glCreateShader (GL_VERTEX_SHADER);
    glShaderSource (vs, 1, &vertex_shader, NULL);
    glCompileShader (vs);
    GLint success = 0;
    glGetShaderiv(vs, GL_COMPILE_STATUS, &success);
    checkShader(success, vs, "Vert Shader");

    GLuint fs = glCreateShader (GL_FRAGMENT_SHADER);
    glShaderSource (fs, 1, &fragment_shader, NULL);
    glCompileShader (fs);
    glGetShaderiv(fs, GL_COMPILE_STATUS, &success);
    checkShader(success, fs, "Frag Shader"); 

    GLuint shader_program = glCreateProgram ();
    glAttachShader (shader_program, fs);
    glAttachShader (shader_program, vs);
    glLinkProgram (shader_program);
    gluErrorString("Link prog",glGetError());

    glUseProgram (shader_program);
    gluErrorString("use prog",glGetError());


    GLuint uniT = glGetUniformLocation(shader_program,"u_time"); // ask gl to assign uniform id
    gluErrorString("get uniform location",glGetError());    

    printf("uniT=%i\n",uniT);

    glEnable (GL_DEPTH_TEST);
    glDepthFunc (GL_LESS);
    float t=0;



    while (!glfwWindowShouldClose (window)) {

        glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        gluErrorString("clear",glGetError());

        glUseProgram (shader_program);
        gluErrorString("use prog",glGetError());

        t=t+0.01f;
        glUniform1f( uniT, (GLfloat)sin(t));
        gluErrorString("set uniform",glGetError());

        float val;
        glGetUniformfv(shader_program,  uniT,  &val);
        gluErrorString("get uniform",glGetError());
        printf("val=%f ",val);

        glBindVertexArray (vao);
        gluErrorString("bind array",glGetError());

        glDrawArrays (GL_TRIANGLE_FAN, 0, 4);
        gluErrorString("draw arrays",glGetError());

        glfwPollEvents ();
        glfwSwapBuffers (window);
        gluErrorString("swap buffers",glGetError());
    }

    glfwTerminate();
    return 0;
}


void checkShader(GLuint status, GLuint shader, const char* which) {
    if (status==GL_TRUE) return;
    int length;
    char buffer[1024];
    glGetShaderInfoLog(shader, sizeof(buffer), &length, buffer);
    fprintf (stderr,"%s Error: %s\n", which,buffer);
    glfwTerminate();
    exit(-1);
}

struct token_string
{
    GLuint Token;
    const char *String;
};

static const struct token_string Errors[] = {
    { GL_NO_ERROR, "no error" },
    { GL_INVALID_ENUM, "invalid enumerant" },
    { GL_INVALID_VALUE, "invalid value" },
    { GL_INVALID_OPERATION, "invalid operation" },
    { GL_STACK_OVERFLOW, "stack overflow" },
    { GL_STACK_UNDERFLOW, "stack underflow" },
    { GL_OUT_OF_MEMORY, "out of memory" },
    { GL_TABLE_TOO_LARGE, "table too large" },
#ifdef GL_EXT_framebuffer_object
    { GL_INVALID_FRAMEBUFFER_OPERATION_EXT, "invalid framebuffer operation" },
#endif

    { ~0, NULL } /* end of list indicator */
};

void gluErrorString(const char* why,GLenum errorCode)
{
    if (errorCode== GL_NO_ERROR) return;
    int i;
    for (i = 0; Errors[i].String; i++) {
        if (Errors[i].Token == errorCode) { 
            fprintf (stderr,"error: %s - %s\n",why,Errors[i].String);
            glfwTerminate();
            exit(-1);            
        }
    }

}

When the code runs, the quad flickers as if the uniform is getting junk values, also getting the value of the uniform shows some odd values like 36893488147419103232.000000 where it should be just a simple sine value

Upvotes: 1

Views: 307

Answers (1)

derhass
derhass

Reputation: 45352

The problem with your code is only indirectly related to GL at all - your GL code is OK.

However, you are using modern OpenGL functions without loading the function pointers as an extension. This might work on some platforms, but not at others. MacOS does guarantee that these functions are exported in the system's GL libs. On windows, Microsofts opengl32.dll never contains function beyond GL 1.1 - your code wouldn't link there. On Linux, you're somewhere inbetween. There is only this old Linux OpenGL ABI document, which guarantees that OpenGL 1.2 functions must be exported by the library. In practice, most GL implementation's libs on Linux export everything (but the fact that the function is there does not mean that it is supported). But you should never directly link these functions, because nobody is guaranteeing anything.

However, the story does not end here: You apparently did this on an implementation which does export the symbols. However, you did not include the correct headers. And you have set up your compiler very poorly. In C, it is valid (but poor style) to call a function which has not been declared before. The compiler will asslume that it returns int and that all parameters are ints. In effect, you are calling these functions, but the compiler will convert the arguments to int.

You would have noticed that if you had set up your compiler to produce some warnings, like -Wall on gcc:

a.c: In function ‘main’:
a.c:74: warning: implicit declaration of function ‘glGenVertexArrays’
a.c:75: warning: implicit declaration of function ‘glBindVertexArray’
[...]

However, the code compiles and links, and I can reproduces results you described (I'm using Linux/Nvidia here).

To fix this, you should use a OpenGL Loader Library. For example, I got your code working by using GLEW. All I had to do was adding at the very top at the file

#define GLEW_NO_GLU // because you re-implemented some glu-like functions with a different interface
#include <glew.h>

and calling

glewExperimental=GL_TRUE;
if (glewInit() != GLEW_OK) {
    fprintf (stderr, "ERROR: failed to initialize GLEW\n");
    glfwTerminate();
    return 1;
}
glGetError(); // read away error generated by GLEW, it is broken in core profiles...

The GLEW headers include declarations for all the functions, so no implicit type conversions do occur any more. GLEW might not be the best choice for core profiles, however I just used it because that's the loader I'm most familiar with.

Upvotes: 2

Related Questions