Reputation: 21
I'm trying to texture a sphere. My vertex shader:
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec3 a_texCoord0;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
uniform sampler2D u_texture;
varying vec3 fragPos;
varying vec3 normal;
varying vec3 color;
void main()
{
    gl_Position = projection * view * model * vec4(a_position, 1.0);
    fragPos = vec3(model * vec4(a_position, 1.0));
    normal = a_normal;
    if (a_texCoord0.x > 50.0) {
        // Debug: faces whose UVs were set to (100, 100) are drawn plain red.
        color = vec3(1.0, 0.0, 0.0);
    } else {
        color = texture2D(u_texture, a_texCoord0.xy).rgb;
    }
}
My fragment shader:
#ifdef GL_ES
precision mediump float;
#endif
varying vec3 normal;
varying vec3 color;
varying vec3 fragPos;
uniform vec3 lightPos;
uniform vec3 lightColor;
void main()
{
    // Ambient
    float ambientStrength = 0.1;
    vec3 ambient = ambientStrength * lightColor;
    // Diffuse
    vec3 norm = normalize(normal);
    vec3 lightDir = normalize(lightPos - fragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = diff * lightColor;
    //vec3 result = (ambient + diffuse) * color;
    vec3 result = color;
    gl_FragColor = vec4(result, 1.0);
}
I build the sphere from an icosahedron, but texture it with 6 copies of the same texture joined together on the cubemap principle. This is the code that translates spherical coordinates to UV (the toSphericalCoordinates helper is sketched after the listing):
public void fillTexInformation(Vertex vertex) {
    float[] sphericalCoord = GeometryHelper.toSphericalCoordinates(vertex.getPosition());
    vertex.setTexCoord(projection(sphericalCoord[1], sphericalCoord[2]));
}

/**
 * Projects a point on the sphere to a texture coordinate.
 * @param theta polar angle
 * @param phi   azimuth angle
 * @return UV coordinate on the corresponding cube face
 */
//https://stackoverflow.com/questions/29678510/convert-21-equirectangular-panorama-to-cube-map
private Vector2 projection(float theta, float phi) {
    // theta near 0 maps to the right face, theta near pi to the left face;
    // everything in between is assigned to one of the four side faces by phi.
    if (theta < 0.615) {
        return projectRight(theta, phi);
    } else if (theta > 2.527) {
        return projectLeft(theta, phi);
    } else if (phi <= Math.PI / 4 || phi > 7 * Math.PI / 4) {
        return projectBack(theta, phi);
    } else if (phi > Math.PI / 4 && phi <= 3 * Math.PI / 4) {
        return projectBottom(theta, phi);
    } else if (phi > 3 * Math.PI / 4 && phi <= 5 * Math.PI / 4) {
        return projectFront(theta, phi);
    } else if (phi > 5 * Math.PI / 4 && phi <= 7 * Math.PI / 4) {
        return projectTop(theta, phi);
    } else {
        throw new RuntimeException("Algorithm error");
    }
}
private Vector2 projectBack(float theta, float phi) {
    float y = (float) Math.tan(phi);
    float z = (float) ((1 / Math.tan(theta)) / Math.cos(phi));
    if (z < -1) {
        return projectLeft(theta, phi);
    }
    if (z > 1) {
        return projectRight(theta, phi);
    }
    return new Vector2(normilizeTexCoord(y), normilizeTexCoord(z));
}
private Vector2 projectBottom(float theta, float phi) {
    float x = (float) Math.tan(phi - Math.PI / 2);
    float z = (float) ((1 / Math.tan(theta)) / Math.cos(phi - Math.PI / 2));
    if (z < -1) {
        return projectLeft(theta, phi);
    }
    if (z > 1) {
        return projectRight(theta, phi);
    }
    // return new Vector2(normilizeTexCoord(x), normilizeTexCoord(z));
    return new Vector2(100, 100); // debug: UV (100, 100) makes the shader draw this face red
}
private Vector2 projectFront(float theta, float phi) {
    float y = (float) Math.tan(phi);
    float z = (float) (-(1 / Math.tan(theta)) / Math.cos(phi));
    if (z < -1) {
        return projectLeft(theta, phi);
    }
    if (z > 1) {
        return projectRight(theta, phi);
    }
    // return new Vector2(normilizeTexCoord(y), normilizeTexCoord(z));
    return new Vector2(100, 100);
}
private Vector2 projectTop(float theta, float phi) {
    float x = (float) Math.tan(phi - 3 * Math.PI / 2);
    float z = (float) ((1 / Math.tan(theta)) / Math.cos(phi - 3 * Math.PI / 2));
    if (z < -1) {
        return projectLeft(theta, phi);
    }
    if (z > 1) {
        return projectRight(theta, phi);
    }
    // return new Vector2(normilizeTexCoord(x), normilizeTexCoord(z));
    return new Vector2(100, 100);
}
private Vector2 projectRight(float theta, float phi) {
    float x = (float) (Math.tan(theta) * Math.cos(phi));
    float y = (float) (Math.tan(theta) * Math.sin(phi));
    // return new Vector2(normilizeTexCoord(x), normilizeTexCoord(y));
    return new Vector2(100, 100);
}
private Vector2 projectLeft(float theta, float phi) {
    float x = (float) (-Math.tan(theta) * Math.cos(phi));
    float y = (float) (-Math.tan(theta) * Math.sin(phi));
    // return new Vector2(normilizeTexCoord(x), normilizeTexCoord(-y));
    return new Vector2(100, 100);
}
private float normilizeTexCoord(float coord) {
    // Maps a coordinate from [-1, 1] to [0, 1].
    return (coord + 1) / 2;
}
As a result I get terrible texture quality loss. This is the original texture and what I get on the sphere (only one part of the cubemap is textured here; the other sides are drawn in red). I guessed it might be connected with the mismatch between how the sphere is built (from an icosahedron) and how it is textured (with a kind of cubemap), but that would explain the uneven texture edges, not such a severe quality loss. Can someone please explain what is happening here?
Upvotes: 1
Views: 143
Reputation: 22167
This happens because you sample the texture in the vertex shader, which means you only get three colors per triangle, one at each corner. The colors of all other pixels are interpolated from those.
For better quality, move the texture sampling to the fragment shader and interpolate the UV coordinates instead of the colors:
Vertex Shader:
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec3 a_texCoord0;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
varying vec3 fragPos;
varying vec3 normal;
varying vec2 texcoord0;
void main()
{
    gl_Position = projection * view * model * vec4(a_position, 1.0);
    fragPos = vec3(model * vec4(a_position, 1.0));
    normal = a_normal;
    texcoord0 = a_texCoord0.xy;
}
Fragment Shader:
#ifdef GL_ES
precision mediump float;
#endif
varying vec3 normal;
varying vec2 texcoord0;
varying vec3 fragPos;
uniform sampler2D u_texture;
uniform vec3 lightPos;
uniform vec3 lightColor;
void main()
{
    vec3 color = texture2D(u_texture, texcoord0).rgb;
    // Ambient
    float ambientStrength = 0.1;
    vec3 ambient = ambientStrength * lightColor;
    // Diffuse
    vec3 norm = normalize(normal);
    vec3 lightDir = normalize(lightPos - fragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = diff * lightColor;
    //vec3 result = (ambient + diffuse) * color;
    vec3 result = color;
    gl_FragColor = vec4(result, 1.0);
}
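For reference, a minimal sketch of how the shader and uniforms might be wired up, assuming a libGDX setup (which the a_position/a_texCoord0 attribute names and the Vector2 usage suggest); mesh, texture, camera, modelMatrix, lightPos and lightColor are placeholders for your own objects:
ShaderProgram shader = new ShaderProgram(vertexShaderSource, fragmentShaderSource);
// Bind the texture to unit 0 and point the sampler uniform at that unit.
texture.bind(0);
shader.begin(); // shader.bind() in newer libGDX versions
shader.setUniformi("u_texture", 0);
shader.setUniformMatrix("model", modelMatrix);
shader.setUniformMatrix("view", camera.view);
shader.setUniformMatrix("projection", camera.projection);
shader.setUniformf("lightPos", lightPos);
shader.setUniformf("lightColor", lightColor);
mesh.render(shader, GL20.GL_TRIANGLES);
shader.end();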
Upvotes: 4