Reputation: 175
I've been struggling to understand the output of glReadPixels, which appears to be straightforward in theory, but actually produces puzzling (at least to me) results.
Let's assume I have a simple fragment shader that colors a single triangle with vec4(0.2, 0.0, 0.0, 0.0), while the background color is set to (0.3, 1.0, 1.0, 0.0).
Below is the complete code (with the exception of shader construction) that I use to produce that image (for the picture itself, see the imgur link at the end of my answer below):
#include "shader.h" // shader compile/link/use
#include <GL/glew.h> // GLEW must be included before the GL headers that GLFW pulls in
#include <GLFW/glfw3.h>
#include <iostream>
const int DISPLAY_WIDTH = 16;
const int DISPLAY_HEIGHT = 16;
//============= shader code ==========================
const GLchar *vertexShaderSource = R"glsl(#version 440
in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
})glsl";
const GLchar *fragmentShaderSource = R"glsl(#version 440
out vec4 outColor;
void main()
{
outColor = vec4(0.2,0.,0.,0.);
})glsl";
//============= c++ entry point ==========================
int main(int argc, char** argv) {
glfwInit();
GLFWwindow* window = glfwCreateWindow(DISPLAY_WIDTH, DISPLAY_HEIGHT, "test", NULL, NULL);
glfwMakeContextCurrent(window);
GLenum res = glewInit();
// triangle data (xy-position)
float vertices[] = {
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// enable vertex xy-position attribute
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
// compile, link and use shader program
Shader shader(vertexShaderSource, fragmentShaderSource);
shader.Use();
// rendering loop
while (!glfwWindowShouldClose(window)) {
glClearColor(0.3f, 1.0f, 1.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glFlush();
// read pixels from backbuffer
GLubyte data[DISPLAY_WIDTH * DISPLAY_HEIGHT];
glReadPixels(0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT, GL_RED, GL_UNSIGNED_BYTE, data);
for (int i = 0; i < DISPLAY_WIDTH * DISPLAY_HEIGHT; i++) {
int a = data[i]; // implicit conversion of unsigned char to int
std::cout << a << std::endl;
}
std::getchar(); // wait for user input
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
Note that I am using the default framebuffer, which stores my color values as unsigned normalized 8-bit integers, i.e. the floating-point range [0, 1] is mapped to [0, 255]: my background color becomes (76, 255, 255, 0) and my geometry color becomes (51, 0, 0, 0).
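Just to illustrate that conversion with a tiny sketch (purely illustrative; how the GL rounds fractional values is implementation-defined, and plain truncation happens to reproduce the 76 and 51 that I observe):
#include <algorithm>
#include <iostream>
// Quantize a float color component to an unsigned normalized 8-bit value:
// clamp to [0, 1] and scale to [0, 255]. Truncation is used here for illustration.
unsigned char toUnorm8(float c)
{
    c = std::min(std::max(c, 0.0f), 1.0f);
    return static_cast<unsigned char>(c * 255.0f);
}
int main()
{
    std::cout << (int)toUnorm8(0.3f) << std::endl; // background red -> 76
    std::cout << (int)toUnorm8(0.2f) << std::endl; // triangle red   -> 51
}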
So after I draw my geometry and swap the buffers, I get my image. Now I want to read back the color values, so I insert the glReadPixels-related code right before the buffer swap:
GLubyte* data = new GLubyte[DISPLAY_WIDTH * DISPLAY_HEIGHT];
glReadPixels(0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT, GL_RED, GL_UNSIGNED_BYTE, data);
To make it easier to examine the pixel values read back from the framebuffer, I extract only the red channel, so the data buffer only needs to hold DISPLAY_WIDTH * DISPLAY_HEIGHT bytes. Consequently, the values I expect to print are 76 for the background color and 51 for the geometry.
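As an aside (not the cause of my problem, since 16 bytes per row is already a multiple of 4): when reading a single channel as bytes, the default GL_PACK_ALIGNMENT of 4 pads each destination row to a 4-byte boundary, so for arbitrary widths the alignment should be lowered first, e.g.:
// Tightly pack rows in the destination buffer; the default pack alignment of 4
// would otherwise pad rows whose byte length is not a multiple of 4.
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glReadPixels(0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT, GL_RED, GL_UNSIGNED_BYTE, data);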
Surprisingly, every single red-channel value I print (all DISPLAY_WIDTH * DISPLAY_HEIGHT of them) turns out to be 76, as if the geometry were ignored. Note that I read the pixels after the draw call and before the buffer swap.
I would greatly appreciate it if you could let me know what I am missing here.
Upvotes: 4
Views: 1636
Reputation: 175
So here is the full story. It is worth mentioning that, as Bahbar said, I was reading out the pixels correctly. However, I had a wrong assumption about the size of my default framebuffer, whose dimensions match the dimensions of the window that I create.
The window size that I request in the code with glfwCreateWindow is 16x16. Clearly, the image I attached in the original question is wider than that.
The GLFW documentation (see glfwSetWindowSize) states: "The window manager may put limits on what sizes are allowed. GLFW cannot and should not override these limits". It turns out that I cannot set a width smaller than 120 pixels on my Windows 10 machine. Examining a data array of 120x16 pixels, rather than 16x16 pixels, revealed that the value 51 is indeed properly recorded.
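A simple way to catch this kind of mismatch (assuming GLFW 3, which provides glfwGetFramebufferSize) is to query the actual framebuffer size after creating the window instead of trusting the requested dimensions:
// The window manager may enforce a larger size than requested, so ask GLFW
// for the real framebuffer dimensions and use those for glViewport/glReadPixels.
int fbWidth = 0, fbHeight = 0;
glfwGetFramebufferSize(window, &fbWidth, &fbHeight);
std::cout << "framebuffer: " << fbWidth << " x " << fbHeight << std::endl; // 120 x 16 in my case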
Then, in order to make sure the geometry is drawn exactly where I want it, I need to call glViewport before my draw calls to specify the desired transformation from normalized device coordinates (NDC) to pixels in my window (or to pixels in a renderbuffer or texture, if I am rendering off-screen with a user-defined framebuffer object). Below is the output and a slightly modified version of the code provided by genpfault (his code is completely self-contained and includes the shader-construction bits that my example lacks):
GLEW version: 2.0.0
GLFW version: 3.2.1 Win32 WGL EGL VisualC
GL_VERSION : 4.5.0 NVIDIA 376.53
GL_VENDOR : NVIDIA Corporation
GL_RENDERER : GeForce GTX 970/PCIe/SSE2
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 51 51 76 76 76 76 76 76 76
76 76 76 76 76 76 76 51 51 76 76 76 76 76 76 76
76 76 76 76 76 76 51 51 51 51 76 76 76 76 76 76
76 76 76 76 76 76 51 51 51 51 76 76 76 76 76 76
76 76 76 76 76 51 51 51 51 51 51 76 76 76 76 76
76 76 76 76 76 51 51 51 51 51 51 76 76 76 76 76
76 76 76 76 51 51 51 51 51 51 51 51 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
The code itself contains a glViewport call (right before the rendering loop) and a slightly modified printing loop in order to match the output to the actual image:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load(const char* shader, ...)
{
GLuint prog = glCreateProgram();
va_list args;
va_start(args, shader);
while (shader)
{
const GLenum type = va_arg(args, GLenum);
AttachShader(prog, type, shader);
shader = va_arg(args, const char*);
}
va_end(args);
glLinkProgram(prog);
CheckStatus(prog);
return prog;
}
private:
static void CheckStatus(GLuint obj)
{
GLint status = GL_FALSE;
if (glIsShader(obj)) glGetShaderiv(obj, GL_COMPILE_STATUS, &status);
if (glIsProgram(obj)) glGetProgramiv(obj, GL_LINK_STATUS, &status);
if (status == GL_TRUE) return;
GLchar log[1 << 15] = { 0 };
if (glIsShader(obj)) glGetShaderInfoLog(obj, sizeof(log), NULL, log);
if (glIsProgram(obj)) glGetProgramInfoLog(obj, sizeof(log), NULL, log);
std::cerr << log << std::endl;
exit(EXIT_FAILURE);
}
static void AttachShader(GLuint program, GLenum type, const char* src)
{
GLuint shader = glCreateShader(type);
glShaderSource(shader, 1, &src, NULL);
glCompileShader(shader);
CheckStatus(shader);
glAttachShader(program, shader);
glDeleteShader(shader);
}
};
const GLchar *vertexShaderSource = R"glsl(#version 130
in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
})glsl";
const GLchar *fragmentShaderSource = R"glsl(#version 130
out vec4 outColor;
void main()
{
outColor = vec4(0.2,0.,0.,0.);
})glsl";
const int DISPLAY_WIDTH = 16;
const int DISPLAY_HEIGHT = 16;
int main(int argc, char** argv)
{
glfwInit();
GLFWwindow* window = glfwCreateWindow(DISPLAY_WIDTH, DISPLAY_HEIGHT, "test", NULL, NULL);
glfwMakeContextCurrent(window);
GLenum res = glewInit();
std::cout << "GLEW version: " << glewGetString(GLEW_VERSION) << std::endl;
std::cout << "GLFW version: " << glfwGetVersionString() << std::endl;
std::cout << "GL_VERSION : " << glGetString(GL_VERSION) << std::endl;
std::cout << "GL_VENDOR : " << glGetString(GL_VENDOR) << std::endl;
std::cout << "GL_RENDERER : " << glGetString(GL_RENDERER) << std::endl << std::endl;
// triangle data (xy-position)
float vertices[] =
{
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// enable vertex xy-position attribute
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
// compile, link and use shader program
GLuint program = Program::Load
(
vertexShaderSource, GL_VERTEX_SHADER,
fragmentShaderSource, GL_FRAGMENT_SHADER,
NULL
);
glUseProgram(program);
glViewport(0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT);
// rendering loop
while (!glfwWindowShouldClose(window)) {
glClearColor(0.3f, 1.0f, 1.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glFlush();
// read pixels from backbuffer
GLubyte data[DISPLAY_WIDTH * DISPLAY_HEIGHT];
glReadPixels(0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT, GL_RED, GL_UNSIGNED_BYTE, data);
for (int y = DISPLAY_HEIGHT-1; y >= 0; y--)
{
for (int x = 0; x < DISPLAY_WIDTH; x++)
{
std::cout << (int)data[y*DISPLAY_WIDTH + x] << " "; // row stride is DISPLAY_WIDTH; the cast converts unsigned char to int
}
std::cout << std::endl;
}
std::getchar();
std::cout << std::endl;
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
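As a side note on the printing loop: glReadPixels returns rows starting from the lower-left corner of the read region, which is why y runs from DISPLAY_HEIGHT - 1 down to 0, so the console output has the same orientation as the rendered image. An equivalent approach would be to flip the rows into a top-down copy first, roughly like this:
// Flip the bottom-up rows returned by glReadPixels into a top-down buffer
// so it can be printed (or saved) in conventional image order.
GLubyte flipped[DISPLAY_WIDTH * DISPLAY_HEIGHT];
for (int y = 0; y < DISPLAY_HEIGHT; y++)
    for (int x = 0; x < DISPLAY_WIDTH; x++)
        flipped[y * DISPLAY_WIDTH + x] = data[(DISPLAY_HEIGHT - 1 - y) * DISPLAY_WIDTH + x];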
Lastly, I couldn't upload a picture using Stack Overflow's functionality, so here is a link to imgur for those who are interested: https://i.sstatic.net/AogHw.jpg.
If anyone could let me know if this can be accepted as an answer I'll just go ahead and hit the button. Or perhaps someone else could give a more meaningful answer. Thank you for the input guys!
Upvotes: 1
Reputation: 52157
Workin' fine on my Debian Stretch box:
GLEW version: 2.0.0
GLFW version: 3.2.1 X11 GLX EGL clock_gettime /dev/js Xf86vm shared
GL_VERSION : 3.0 Mesa 13.0.6
GL_VENDOR : Intel Open Source Technology Center
GL_RENDERER : Mesa DRI Intel(R) Kabylake GT2
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 51 51 51 51 51 51 51 51 76 76 76 76
76 76 76 76 76 51 51 51 51 51 51 76 76 76 76 76
76 76 76 76 76 51 51 51 51 51 51 76 76 76 76 76
76 76 76 76 76 76 51 51 51 51 76 76 76 76 76 76
76 76 76 76 76 76 51 51 51 51 76 76 76 76 76 76
76 76 76 76 76 76 76 51 51 76 76 76 76 76 76 76
76 76 76 76 76 76 76 51 51 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
76 76 76 76 76 76 76 76 76 76 76 76 76 76 76 76
All together (I dropped the shader #version to 130 because Mesa doesn't support anything past GL 3.0 in non-Core contexts):
// g++ main.cpp -lGLEW -lGL -lglfw
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const GLchar *vertexShaderSource = R"glsl(#version 130
in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
})glsl";
const GLchar *fragmentShaderSource = R"glsl(#version 130
out vec4 outColor;
void main()
{
outColor = vec4(0.2,0.,0.,0.);
})glsl";
const int DISPLAY_WIDTH = 16;
const int DISPLAY_HEIGHT = 16;
int main(int argc, char** argv)
{
glfwInit();
GLFWwindow* window = glfwCreateWindow(DISPLAY_WIDTH, DISPLAY_HEIGHT, "test", NULL, NULL);
glfwMakeContextCurrent(window);
GLenum res = glewInit();
std::cout << "GLEW version: " << glewGetString(GLEW_VERSION) << std::endl;
std::cout << "GLFW version: " << glfwGetVersionString() << std::endl;
std::cout << "GL_VERSION : " << glGetString( GL_VERSION ) << std::endl;
std::cout << "GL_VENDOR : " << glGetString( GL_VENDOR ) << std::endl;
std::cout << "GL_RENDERER : " << glGetString( GL_RENDERER ) << std::endl;
// triangle data (xy-position)
float vertices[] =
{
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// enable vertex xy-position attribute
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
// compile, link and use shader program
GLuint program = Program::Load
(
vertexShaderSource, GL_VERTEX_SHADER,
fragmentShaderSource, GL_FRAGMENT_SHADER,
NULL
);
glUseProgram( program );
// rendering loop
while (!glfwWindowShouldClose(window)) {
glClearColor(0.3f, 1.0f, 1.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glFlush();
// read pixels from backbuffer
GLubyte data[DISPLAY_WIDTH * DISPLAY_HEIGHT];
glReadPixels(0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT, GL_RED, GL_UNSIGNED_BYTE, data);
int i = 0;
for (int y = 0; y < DISPLAY_HEIGHT; y++)
{
for (int x = 0; x < DISPLAY_WIDTH; x++)
{
int a = data[i]; // implicit conversion of unsigned char to int
std::cout << a << " ";
i++;
}
std::cout << std::endl;
}
std::cout << std::endl;
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
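For what it's worth, if you want a newer GLSL version on Mesa you could request a core-profile context via GLFW window hints before glfwCreateWindow; just keep in mind that a core profile also requires a vertex array object, which the code above doesn't create:
// Request an OpenGL 3.3 core-profile context (must be set before glfwCreateWindow).
// Note: a core profile additionally requires a bound VAO for glDrawArrays to succeed.
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);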
Upvotes: 2