Reputation: 452
I'm having trouble getting my shader to run on the perimeter of a texture. I'm following along with https://learnopengl.com/ as a guide and using indexed vertices to draw line primitives. The problem is only the right side and bottom lines are drawn, top and left are missing.
I've written a small program to demonstrate this by trying to draw a red border around a black viewport. It's mostly the same skeleton code from learnopengl.com.
Here's the shader:
#version 330 core

// Final color written for each covered fragment.
out vec4 FragColor;

void main()
{
    // Paint every fragment of the border lines solid, fully opaque red.
    FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
And the C++ code:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <learnopengl/shader_s.h>
#include <iostream>
// Forward declarations — both functions are defined below main().
void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput(GLFWwindow* window);
// Initial window / framebuffer dimensions, in pixels.
const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 800;
// Bundles the three GL object names that describe one drawable:
// vertex buffer, vertex array object, and element (index) buffer.
// 0 is OpenGL's "no object" sentinel, so members default to 0.
//
// NOTE: the original constructor's mem-initializer list (VAO, VBO, EBO)
// did not match the declaration order (VBO, VAO, EBO) — harmless here,
// but a -Wreorder trap. In-class initializers avoid the issue entirely
// and let the struct follow the Rule of Zero.
struct vaoinfo
{
    unsigned int VBO = 0;
    unsigned int VAO = 0;
    unsigned int EBO = 0;
};
// Reserves GL object names for one drawable and records them in the
// caller's bookkeeping struct: two buffers (vertex + index) and the
// vertex array object that will tie them together.
void create_vao(vaoinfo& info)
{
    glGenBuffers(1, &info.VBO);
    glGenBuffers(1, &info.EBO);
    glGenVertexArrays(1, &info.VAO);
}
// Uploads vertex/index data into the objects owned by `info` and records
// the attribute layout in the VAO.
//
// vertices     — tightly packed xyz positions (3 floats per vertex)
// num_vertices — number of FLOATS in `vertices` (not the vertex count)
// indices      — element indices, drawn later with GL_UNSIGNED_INT
// num_indices  — number of ints in `indices`
//
// BUG FIX: the original unconditionally re-ran glGenVertexArrays /
// glGenBuffers here, even though create_vao() had already generated
// names for this struct — leaking the first set of GL objects on every
// create_vao + init_vao pairing in main(). Names are now generated only
// when the struct does not already hold one.
void init_vao(vaoinfo& info, float* vertices, int num_vertices, int* indices, int num_indices)
{
    if (info.VAO == 0) glGenVertexArrays(1, &info.VAO);
    if (info.VBO == 0) glGenBuffers(1, &info.VBO);
    if (info.EBO == 0) glGenBuffers(1, &info.EBO);

    glBindVertexArray(info.VAO);

    glBindBuffer(GL_ARRAY_BUFFER, info.VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(float) * num_vertices, vertices, GL_STATIC_DRAW);

    // The element-buffer binding is part of VAO state, so it must be bound
    // while the VAO is bound for glDrawElements to find it later.
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, info.EBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(int) * num_indices, indices, GL_STATIC_DRAW);

    // Attribute 0: three tightly packed floats per vertex, starting at offset 0.
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);

    glBindVertexArray(0);
}
int main()
{
// glfw: initialize and configure
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// glfw window creation
GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", NULL, NULL);
if (window == NULL)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
std::cout << "Failed to initialize GLAD" << std::endl;
return -1;
}
Shader borderProgram("default.vert.glsl", "border.frag.glsl"); // you can name your shader files however you like
float vertices[] = {
1.f, 1.f, 0.0f, // top right
1.f, -1.f, 0.0f, // bottom right
-1.f, -1.f, 0.0f, // bottom left
-1.f, 1.f, 0.0f, // top left
};
vaoinfo T, B, L, R;
int top[] = { 3,0 };
create_vao(T);
init_vao(T, vertices, 12, top, 2);
int bottom[] = { 1,2 };
create_vao(B);
init_vao(B, vertices, 12, bottom, 2);
int left[] = { 2,3 };
create_vao(L);
init_vao(L, vertices, 12, left, 2);
int right[] = { 0,1 };
create_vao(R);
init_vao(R, vertices, 12, right, 2);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
while (!glfwWindowShouldClose(window))
{
// input
processInput(window);
glClearColor(0.f, 0.f, 0.f, 0.f);
borderProgram.use();
glBindVertexArray(T.VAO);
glDrawElements(GL_LINES, 2, GL_UNSIGNED_INT, nullptr);
glBindVertexArray(B.VAO);
glDrawElements(GL_LINES, 2, GL_UNSIGNED_INT, nullptr);
glBindVertexArray(L.VAO);
glDrawElements(GL_LINES, 2, GL_UNSIGNED_INT, nullptr);
glBindVertexArray(R.VAO);
glDrawElements(GL_LINES, 2, GL_UNSIGNED_INT, nullptr);
glfwSwapBuffers(window);
glfwPollEvents();
Sleep(16);
}
glfwTerminate();
return 0;
}
// Polled every frame: request window close as soon as Escape is pressed.
void processInput(GLFWwindow* window)
{
    const bool escape_down = glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS;
    if (escape_down)
    {
        glfwSetWindowShouldClose(window, true);
    }
}
// GLFW callback: keeps the GL viewport matched to the framebuffer
// whenever the window is resized by the user or the OS.
void framebuffer_size_callback(GLFWwindow* window, int width, int height)
{
    (void)window; // unused; signature dictated by GLFW
    glViewport(0, 0, width, height);
}
Edit: I figured out the issue... kind of. I was under the impression that when specifying vertex positions (I think it's called "local space" when creating a VAO) the range of coordinates for x and y was [-1, 1]. But apparently it's effectively [-m.x, 1] and [-1, m.y], where m = (1 - 1/w, 1 - 1/h). Is this assumption correct?
Upvotes: 3
Views: 1092
Reputation: 5797
The X and Y coordinates -1 and +1 in clip space (which is what you draw in) map to the very edges of the viewport. The problem is that when you draw a line along the very edge of a pixel, OpenGL needs to decide which of the two pixels sharing that edge will produce a fragment. As far as the OpenGL specification goes, an actual implementation is free to choose whichever behaviour it wants, so long as certain conditions are met — for example, it must not be the case that both adjacent pixels produce a fragment; exactly one of them should. The reason you do not see pixels generated along all four edges of the viewport is that an OpenGL implementation also needs to apply that tie-breaking rule consistently. If the viewport were just one pixel larger to the left and to the top (where you currently don't see any pixels of the line), you would see the line's pixels there.
If you want to make sure that you see all four lines at all four edges of the viewport, you must adjust the coordinates of the lines such that they cross the center of the pixel (as opposed to the edges between two adjacent pixels).
You can do that by offsetting your vertex coordinates by effectively half a pixel. Since clip space spans 2 units along each of the X and Y axes, offset the negative vertex coordinates by +1.0f/windowWidth (for X) or +1.0f/windowHeight (for Y), and the positive coordinates by -1.0f/windowWidth (for X) or -1.0f/windowHeight (for Y).
It actually helps if you draw a small grid onto paper, mark the very left/bottom edge as -1 and the very right/top edge as +1 and then use simple math to figure out the positions of the centers of the pixels on the edges. This will then be the coordinates of the vertices you need to draw.
Upvotes: 2