Reputation: 21
I am attempting to have a GLSL fragment shader distort incoming fragments based on their texture coordinates to poorly simulate a CRT.
After the shader failed to work, I ported the code to C++ to modify the RGB values of a texture directly. There it worked as expected.
This leads me to believe that something is wrong with my GLSL code, even though the same logic, mirrored in C++, works perfectly.
Is there something that I don't know about GLSL math that could be causing this?
C++ code:
#include <glm/glm.hpp>
#include <cmath>

const unsigned int RED  = 0xFFFF0000;
const unsigned int BLUE = 0xFF0000FF;
const float X_MAX    = 429.0f / 448.0f;
const float Y_MAX    = 320.0f / 336.0f;
const float X_CORNER = 410.0f / 448.0f;
const float Y_CORNER = 306.0f / 336.0f;
// Coefficients chosen so that each curve passes through the corner points.
const float X_COEF = (X_MAX - X_CORNER) / (Y_CORNER * Y_CORNER);
const float Y_COEF = (Y_MAX - Y_CORNER) / (X_CORNER * X_CORNER);

float FUNCX(float y)
{
    return X_MAX - X_COEF * y * y;
}

float FUNCY(float x)
{
    return Y_MAX - Y_COEF * x * x;
}
unsigned int get(glm::vec2 intex)
{
    intex *= 2.0f;   // Transform the texture rectangle from 0..1
    intex.x -= 1.0f; // to
    intex.y -= 1.0f; // -1..1
    glm::vec2 d(0.0f, 0.0f);
    d.x = FUNCX(intex.y); // curve amount for X values, based on the Y input
    d.y = FUNCY(intex.x); // curve amount for Y values, based on the X input
    if (std::abs(intex.x / d.x) > 1.0f) // if the X value is outside the curve,
        return RED;                     // draw RED for debugging
    if (std::abs(intex.y / d.y) > 1.0f) // if the Y value is outside the curve,
        return BLUE;                    // draw BLUE for debugging
    glm::vec2 outtex(0.0f, 0.0f);
    outtex.x = 1.0f + intex.x / d.x; // Now the -1..1 values get shifted back
    outtex.y = 1.0f + intex.y / d.y; // to
    outtex /= 2.0f;                  // 0..1
    // "texture" is the application's texture object; 512 is its size.
    return texture.get(512 * outtex.x, 512 * outtex.y);
}
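
As a sanity check on the curve math itself: the coefficients are chosen so that each parabola passes through the corner values, i.e. FUNCX(±Y_CORNER) == X_CORNER and FUNCY(±X_CORNER) == Y_CORNER. This minimal standalone program (just a sketch, using only the constants above) confirms that on the CPU:

#include <cassert>
#include <cmath>
#include <cstdio>

// Same constants and curve functions as in the snippet above.
const float X_MAX    = 429.0f / 448.0f;
const float Y_MAX    = 320.0f / 336.0f;
const float X_CORNER = 410.0f / 448.0f;
const float Y_CORNER = 306.0f / 336.0f;
const float X_COEF = (X_MAX - X_CORNER) / (Y_CORNER * Y_CORNER);
const float Y_COEF = (Y_MAX - Y_CORNER) / (X_CORNER * X_CORNER);

float FUNCX(float y) { return X_MAX - X_COEF * y * y; }
float FUNCY(float x) { return Y_MAX - Y_COEF * x * x; }

int main()
{
    // Each parabola is widest at the screen centre ...
    assert(std::fabs(FUNCX(0.0f) - X_MAX) < 1e-5f);
    assert(std::fabs(FUNCY(0.0f) - Y_MAX) < 1e-5f);
    // ... and narrows to the corner value at the edge of the other axis.
    assert(std::fabs(FUNCX(Y_CORNER) - X_CORNER) < 1e-5f);
    assert(std::fabs(FUNCY(X_CORNER) - Y_CORNER) < 1e-5f);
    std::printf("curve constants check out\n");
    return 0;
}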
GLSL fragment shader:
uniform sampler2D texture; // the input texture

const vec4 RED  = vec4(1.0, 0.0, 0.0, 1.0);
const vec4 BLUE = vec4(0.0, 0.0, 1.0, 1.0);
const float X_MAX    = 429.0 / 448.0;
const float Y_MAX    = 320.0 / 336.0;
const float X_CORNER = 410.0 / 448.0;
const float Y_CORNER = 306.0 / 336.0;
const float X_COEF = (X_MAX - X_CORNER) / (Y_CORNER * Y_CORNER);
const float Y_COEF = (Y_MAX - Y_CORNER) / (X_CORNER * X_CORNER);

float FUNCX(float y)
{
    return X_MAX - X_COEF * y * y;
}

float FUNCY(float x)
{
    return Y_MAX - Y_COEF * x * x;
}
vec4 get(vec2 intex)
{
    intex *= 2.0;   // Transform the texture rectangle from 0..1
    intex.x -= 1.0; // to
    intex.y -= 1.0; // -1..1
    vec2 d = vec2(0.0, 0.0);
    d.x = FUNCX(intex.y); // curve amount for X values, based on the Y input
    d.y = FUNCY(intex.x); // curve amount for Y values, based on the X input
    if (abs(intex.x / d.x) > 1.0) // if the X value is outside the curve,
        return RED;               // draw RED for debugging
    if (abs(intex.y / d.y) > 1.0) // if the Y value is outside the curve,
        return BLUE;              // draw BLUE for debugging
    vec2 outtex = vec2(0.0, 0.0);
    outtex.x = 1.0 + intex.x / d.x; // Now the -1..1 values get shifted back
    outtex.y = 1.0 + intex.y / d.y; // to
    outtex /= 2.0;                  // 0..1
    return texture2D(texture, outtex);
}
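
For reference, the entry point that calls get is just a thin wrapper; a minimal sketch of it (assuming the old GLSL 1.x built-ins that texture2D implies) would be:

void main()
{
    gl_FragColor = get(gl_TexCoord[0].st);
}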
Note: The Super Mario World image is for testing purposes only.
Note 2: The 512 values in the C++ code are the size of the texture used.
Edit: There was a typo in the GLSL code where a y value was divided by d.x instead of d.y. This has been fixed, and the output is unchanged because the two denominators are nearly equal (at the screen centre, for example, d.x = X_MAX ≈ 0.958 and d.y = Y_MAX ≈ 0.952).
Upvotes: 0
Views: 188
Reputation: 54642
The code is not the same. In the C++ code:
if (abs(intex.y/d.y) > 1.0) // if the Y value is outside of the curve
return BLUE; // draw BLUE for debugging
In the GLSL code:
if (abs(intex.y/d.x) > 1.0) // if the Y value is outside of the curve
return BLUE; // draw BLUE for debugging
The C++ version divides by d.y, the GLSL version by d.x.
Upvotes: 3
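With the division corrected to match the C++ version, the GLSL check becomes:

if (abs(intex.y/d.y) > 1.0) // if the Y value is outside of the curve
    return BLUE; // draw BLUE for debugging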