Reputation: 11
I'm rendering 2 planes with 2 mixed textures in each plane. But when I null one texture, it is as if the GPU replaces the absent texture with another of the app's textures — even with a texture of the previous plane. I programmed a workaround within the shader, consisting in detecting whether the sampler2D textureN has been initialized:
// Blend up to two textures, each gated by a has_tex_N flag uniform.
// NOTE: the texture2D() lookups are hoisted OUT of the conditionals.
// Sampling with implicit derivatives (a mipmapped texture addressed by
// varyings) inside non-uniform control flow yields undefined results in
// GLSL, so sample unconditionally and branch only on the blend.
void main()
{
    vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
    // Unconditional lookups: safe under uniform control flow.
    vec4 color1 = texture2D(tex_1, uv_1) * 0.5;
    vec4 color2 = texture2D(tex_2, uv_2) * 0.5;
    if(has_tex_1 > 0.0)
    {
        finalPx += color1;
    }
    if(has_tex_2 > 0.0)
    {
        finalPx += color2;
    }
    gl_FragColor = finalPx;
}
... but I fear my solution could complicate my shader programming later, and I think WebGL should be responsible for enforcing the code's logic: if the app does not load a texture, the shader should sample nothing.
This is my app:
// Shared quad mesh data: one 2x2 plane (two triangles) with two
// independent UV channels so each bitmap can be mapped differently.
const PICTURES = {
// Triangle vertex indices into `vertices` (two triangles per quad).
indices: [
0, 1, 2,
0, 3, 1,
],
// Corner positions (x, y, z); z = 0 keeps the quad in the XY plane.
vertices: [
1.000000, -1.000000, 0.000000,
-1.000000, 1.000000, 0.000000,
-1.000000, -1.000000, 0.000000,
1.000000, 1.000000, 0.000000,
],
// Per-vertex texture coordinates, one channel per sampler.
// NOTE(review): values outside [0, 1] rely on the texture's wrap mode;
// loadTexture clamps NPOT textures to edge — confirm that is intended.
uv_layers: {
'uv_1': [
1.000000, 1.077309,
0.000000, -0.077309,
0.000000, 1.077309,
1.000000, -0.077309,
],
'uv_2': [
1.500000, 1.500000,
-0.500000, -0.500000,
-0.500000, 1.500000,
1.500000, -0.500000,
],
},
};
// Vertex shader source: passes two UV channels through to the fragment
// stage and applies the usual projection * view * model transform.
// (JS comments only — GLSL comments inside the template literal would
// become part of the runtime shader string.)
const vertexShaderCode = `
precision mediump float;
attribute vec4 vertex;
attribute vec2 uv;
varying vec2 UV;
attribute vec2 uv2;
varying vec2 UV2;
uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
void main()
{
UV = uv;
UV2 = uv2;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertex;
}`;
// Fragment shader source: samples each bitmap with its own UV channel
// and sums the halves (a 50/50 mix). If a sampler's texture unit has no
// texture bound, the lookup reads as vec4(0,0,0,1) — see the Q&A below.
const fragmentShaderCode = `
precision mediump float;
varying vec2 UV;
varying vec2 UV2;
uniform sampler2D bitmap_1;
uniform sampler2D bitmap_2;
void main() {
// Mix the bitmaps in equal proportions
vec4 px1 = texture2D(bitmap_1, UV) * 0.5;
vec4 px2 = texture2D(bitmap_2, UV2) * 0.5;
gl_FragColor = px1 + px2;
}`;
// Compiles the vertex and fragment shader sources.
// Returns [vertexShader, fragmentShader] on success, or null on any
// compile failure — after deleting every shader created so far, so no
// GL objects leak (the original leaked the compiled vertex shader when
// the fragment shader failed).
function loadShader(gl, vertexShaderCode, fragmentShaderCode) {
  // Compile one shader of the given type; returns null on failure.
  function compile(type, source) {
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
      gl.deleteShader(shader);
      return null;
    }
    return shader;
  }
  const vertexShader = compile(gl.VERTEX_SHADER, vertexShaderCode);
  if (!vertexShader) {
    return null;
  }
  const fragmentShader = compile(gl.FRAGMENT_SHADER, fragmentShaderCode);
  if (!fragmentShader) {
    // BUGFIX: free the already-compiled vertex shader too.
    gl.deleteShader(vertexShader);
    return null;
  }
  return [vertexShader, fragmentShader];
}
// Creates a GL texture and asynchronously fills it from `url`.
// A 1x1 magenta placeholder is uploaded immediately so the texture is
// usable before the image finishes downloading; the onload handler then
// replaces it with the real image. Returns the WebGLTexture handle
// synchronously.
function loadTexture(gl, url) {
  // True when value is a power of two (image sizes are always >= 1).
  function isPowerOf2(value) { return (value & (value - 1)) === 0; }
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  const level = 0;
  const internalFormat = gl.RGBA;
  const width = 1;
  const height = 1;
  const border = 0;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  // BUGFIX: UNSIGNED_BYTE channels range 0..255. The original used
  // [1.0, 0.0, 1.0, 1.0], i.e. a nearly-black 1/255 "magenta" that is
  // indistinguishable from black on screen.
  const pixel = new Uint8Array([255, 0, 255, 255]); // magenta to warn if there is no texture
  gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, srcFormat, srcType, pixel);
  const image = new Image();
  image.onload = function () {
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, image);
    // WebGL1 restricts non-power-of-two textures: no mips, clamped wrap.
    if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
      // Power of two in both dimensions: mipmapping is allowed.
      gl.generateMipmap(gl.TEXTURE_2D);
    } else {
      // NPOT: turn off mips and clamp wrapping to the edge.
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    }
  };
  image.src = url;
  return texture;
}
// Links a [vertexShader, fragmentShader] pair into a GL program stored
// in this.shaderProgram.
// BUGFIX: the original did `return null` on link failure, but a
// constructor invoked with `new` ignores a null return and still yields
// the instance. Instead, delete the failed program and set
// this.shaderProgram to null so callers can detect the failure.
function Material(gl, shaders) {
  this.shaderProgram = gl.createProgram();
  gl.attachShader(this.shaderProgram, shaders[0]); // Vertex shader
  gl.attachShader(this.shaderProgram, shaders[1]); // Fragment shader
  gl.linkProgram(this.shaderProgram);
  if (!gl.getProgramParameter(this.shaderProgram, gl.LINK_STATUS)) {
    alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(this.shaderProgram));
    gl.deleteProgram(this.shaderProgram);
    this.shaderProgram = null;
  }
}
// Application entry point: builds GL buffers, materials, textures and
// model transforms for two quad meshes, then starts the render loop.
// NOTE(review): TEXTURES_FOLDER and mat4 (gl-matrix) are not defined in
// this excerpt — presumably globals from another script; confirm.
function main() {
var surface = document.getElementById('glcanvas');
var gl = surface.getContext('webgl');
if (!gl) {
alert('Unable to initialize WebGL. Your browser or machine may not support it.');
return;
}
// One material for two textures maped within its own uv maps, like any 3D software could allow
let shaders = loadShader(gl,vertexShaderCode, fragmentShaderCode);
// -------------------------------------mesh 1---------------------------------------
// Create the index buffer
const indexBuffer_1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer_1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(PICTURES.indices), gl.STATIC_DRAW);
// The vertex buffer
const vertexBuffer_1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer_1);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.vertices), gl.STATIC_DRAW);
// uv layers buffers (two uv channels for two different bitmaps)
const uvBuffer_1_A = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer_1_A);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_1']), gl.STATIC_DRAW);
const uvBuffer_1_B = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer_1_B);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_2']), gl.STATIC_DRAW);
// NOTE(review): both meshes link a separate Material from the same
// compiled shader pair; one shared program would also work since all
// uniforms are re-set before each draw.
const material_1 = new Material(gl, shaders);
// Create the textures for the respectives uv buffers
const texture_1 = loadTexture(gl, TEXTURES_FOLDER+'old_fashioned_portrait_1.jpg');
const texture_2 = loadTexture(gl, TEXTURES_FOLDER+'demon.jpg');
// Transform data
let modelMatrix_1 = mat4.create();
// Place mesh 1 to the left of the origin.
mat4.translate(modelMatrix_1, modelMatrix_1, [-1.5, 0.0, 0.0]);
// Bundle of everything render() needs to draw mesh 1.
let meshData_1 = {
indices : PICTURES.indices,
indexBuffer : indexBuffer_1,
vertexBuffer: vertexBuffer_1,
uvBuffer_A : uvBuffer_1_A,
uvBuffer_B : uvBuffer_1_B,
material : material_1,
texture_A : texture_1,
texture_B : texture_2,
modelMatrix : modelMatrix_1,
};
// -------------------------------------mesh 2---------------------------------------
// Create the index buffer
const indexBuffer_2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer_2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(PICTURES.indices), gl.STATIC_DRAW);
// The vertex buffer
const vertexBuffer_2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer_2);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.vertices), gl.STATIC_DRAW);
// uv layers buffers (two uv channels for two different bitmaps)
const uvBuffer_2_A = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer_2_A);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_1']), gl.STATIC_DRAW);
const uvBuffer_2_B = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer_2_B);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_2']), gl.STATIC_DRAW);
const material_2 = new Material(gl, shaders);
const texture_3 = loadTexture(gl, TEXTURES_FOLDER+'old_fashioned_portrait_2.jpg');
const texture_4 = loadTexture(gl, TEXTURES_FOLDER+'phantom_girl.jpg');
// Transform data
let modelMatrix_2 = mat4.create();
// Place mesh 2 to the right of the origin.
mat4.translate(modelMatrix_2, modelMatrix_2, [1.5, 0.0, 0.0]);
// Bundle of everything render() needs to draw mesh 2.
let meshData_2 = {
indices : PICTURES.indices,
indexBuffer : indexBuffer_2,
vertexBuffer: vertexBuffer_2,
uvBuffer_A : uvBuffer_2_A,
uvBuffer_B : uvBuffer_2_B,
material : material_2,
texture_A : texture_3,
texture_B : texture_4,
modelMatrix : modelMatrix_2,
};
// Render data and methods
// Seed the loop with the current time (seconds) as the `before` stamp.
requestAnimationFrame(function () {render(gl, meshData_1, meshData_2, Date.now()*0.001);});
}
// Per-frame draw of both meshes. WebGL is a state machine: every binding
// (buffers, active texture unit, program) persists across draw calls
// until explicitly changed. That is why commenting out mesh 2's first
// texture setup below makes it inherit mesh 1's texture still bound on
// texture unit 0.
function render(gl, meshData_1, meshData_2, before)
{
var now = Date.now()*0.001;
var delta = now-before;
// NOTE(review): `rotation` is not declared anywhere in this excerpt —
// presumably a global from another script; confirm, otherwise this
// throws a ReferenceError in strict mode.
rotation += delta;
// ------------------------------------canvas----------------------------------------
gl.clearColor(0.0, 0.5, 0.5, 1.0);
gl.clearDepth(1.0);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LEQUAL);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Camera: 45-degree perspective, pulled back 5 units on Z.
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const fieldOfView = 45 * Math.PI / 180;
const zNear = 0.1;
const zFar = 100.0;
let projectionMatrix = mat4.create();
mat4.perspective(projectionMatrix, fieldOfView, aspect, zNear, zFar);
let viewMatrix = mat4.create();
mat4.translate(viewMatrix, viewMatrix, [0.0, 0.0, -5.0]);
// -------------------------------------mesh 1---------------------------------------
gl.useProgram(meshData_1.material.shaderProgram);
// Vertices transformations
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_1.material.shaderProgram, 'projectionMatrix'), false, projectionMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_1.material.shaderProgram, 'viewMatrix'), false, viewMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_1.material.shaderProgram, 'modelMatrix'), false, meshData_1.modelMatrix);
// Give the GPU the order of the points to form the triangles
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, meshData_1.indexBuffer);
// Give the GPU the transformed vertices
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_1.vertexBuffer);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_1.material.shaderProgram, 'vertex'), 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_1.material.shaderProgram, 'vertex'));
// First texture mapped in the triangles with the first uv coordinates
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_1.uvBuffer_A);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv'));
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, meshData_1.texture_A);
gl.uniform1i(gl.getUniformLocation(meshData_1.material.shaderProgram, 'bitmap_1'), 0);
// Second texture mapped in the triangles with the second uv coordinates
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_1.uvBuffer_B);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv2'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv2'));
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, meshData_1.texture_B);
gl.uniform1i(gl.getUniformLocation(meshData_1.material.shaderProgram, 'bitmap_2'), 1);
gl.drawElements(gl.TRIANGLES, meshData_1.indices.length, gl.UNSIGNED_SHORT, 0);
gl.useProgram(null);
// -------------------------------------mesh 2---------------------------------------
gl.useProgram(meshData_2.material.shaderProgram);
// Vertices transformations
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_2.material.shaderProgram, 'projectionMatrix'), false, projectionMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_2.material.shaderProgram, 'viewMatrix'), false, viewMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_2.material.shaderProgram, 'modelMatrix'), false, meshData_2.modelMatrix);
// Give the GPU the order of the points to form the triangles
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, meshData_2.indexBuffer);
// Give the GPU the transformed vertices
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_2.vertexBuffer);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_2.material.shaderProgram, 'vertex'), 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_2.material.shaderProgram, 'vertex'));
// First texture mapped in the triangles with the first uv coordinates
I point out the problem before going on with the code:
If I comment the next texture render is like the app (the GPU?) feeds the variable 'uniform sampler2D bitmap_1' with the first texture of the previous plane: 'old_fashioned_portrait_1.jpg'
// NOTE(review): this is the state-machine effect described above — with
// the block below commented out, texture unit 0 still holds mesh 1's
// texture_A binding from the previous draw, so bitmap_1 samples it. To
// truly "unset" it, bind null (or a 1x1 black texture) on unit 0.
/*
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_2.uvBuffer_A);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv'));
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, meshData_2.texture_A);
gl.uniform1i(gl.getUniformLocation(meshData_2.material.shaderProgram, 'bitmap_1'), 0);
*/
// Second texture mapped in the triangles with the second uv coordinates
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_2.uvBuffer_B);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv2'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv2'));
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, meshData_2.texture_B);
gl.uniform1i(gl.getUniformLocation(meshData_2.material.shaderProgram, 'bitmap_2'), 1);
gl.drawElements(gl.TRIANGLES, meshData_2.indices.length, gl.UNSIGNED_SHORT, 0);
gl.useProgram(null);
// Schedule the next frame, passing `now` as the next frame's `before`.
requestAnimationFrame(function(){render(gl, meshData_1, meshData_2, now);});
}
// Entry point: set up both meshes and start the render loop.
main();
Upvotes: 0
Views: 322
Reputation:
What LJ said about WebGL being a state machine — or rather, that it has a bunch of state that stays the same until you set something different — is true, but I want to add a few things.
It's arguably considered best practice not to branch in a shader if possible.
If you need a shader to do one thing or another make 2 shaders, one shader that does the one thing and a different shader that does the other. Engines like Unreal and Unity generate 1000s of shader variations. Even three.js generates lots of shader variations.
Branching and texture lookup with varyings will break.
The important part of that link is this
If the texture associated with someOtherSampler uses mipmapping or anisotropic filtering of any kind, then any texture function that requires implicit derivatives in a fragment shader will retrieve undefined results outside of uniform control flow. ...
Note: The GLSL compiler will not give you an error for this. It is perfectly legal GLSL code, and it only produces undefined behavior based on the texture and sampler objects associated with someOtherSampler.
In other words this code
vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
if(has_tex_1 > 0.0)
{
finalPx += texture2D(tex_1, uv_1) * 0.5;
}
if(has_tex_2 > 0.0)
{
finalPx += texture2D(tex_2, uv_2) * 0.5;
}
really needs to be this
vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
vec4 color1 = texture2D(tex_1, uv_1) * 0.5;
vec4 color2 = texture2D(tex_2, uv_2) * 0.5;
if(has_tex_1 > 0.0)
{
finalPx += color1;
}
if(has_tex_2 > 0.0)
{
finalPx += color2;
}
make the parts that look things up from textures based on varyings (uv_1 and uv_2) not be conditional.
you can often design a shader so that it can be used without branching.
In your example you could do this
vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
finalPx += texture2D(tex_1, uv_1) * 0.5;
finalPx += texture2D(tex_2, uv_2) * 0.5;
Then just make a single pixel black texture like this
const blackTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, blackTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
new Uint8Array([0, 0, 0, 0]));
Now when you only want to use one texture, just bind blackTexture for the other texture. Because it's 0, 0, 0, 0, the math in your shader will add 0.0, so it will have no effect. There's no more need for has_tex_1 and has_tex_2.
This is also much better than relying on binding no texture, which just happens to read as 0,0,0,1 but will generate warnings in the browser.
Upvotes: 1
Reputation: 8123
WebGL is a state machine, every state you set remains until you set something different. If you want to unbind a texture you have to do so using:
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, null);
Any read from an unbound texture unit will yield vec4(0,0,0,1)
Upvotes: 0