Reputation: 308
Is it possible to create a 16-bit 2D texture image and retrieve the original 16-bit values using readPixels?
I made a jsfiddle where I build and draw a green gradient with 16-bit pixel values. I then call readPixels, which returns a bunch of [0,0,0,65535]; the next unique value I get is [0,257,0,65535].
I thought this was working properly in full 16-bit until I tested more closely :( Is it possible to get the exact original 16-bit data (not rounded to 8-bit) using readPixels?
https://jsfiddle.net/mortac8/b23cnrxt/5/
// image data
var w = 64;
var h = 64;
var size = w * h * 4;
var img = new Uint16Array(size); // need Uint16Array
for (var i = 0; i < img.length; i += 4) {
  img[i + 0] = 0;     // r
  img[i + 1] = i;     // g
  img[i + 2] = 0;     // b
  img[i + 3] = 65535; // a
}
// draw
gl.texImage2D(
  gl.TEXTURE_2D,     // target
  0,                 // mip level
  gl.RGBA16UI,       // internal format -> gl.RGBA16UI
  w, h,              // width and height
  0,                 // border
  gl.RGBA_INTEGER,   // format -> gl.RGBA_INTEGER
  gl.UNSIGNED_SHORT, // type -> gl.UNSIGNED_SHORT
  img                // texture data
);
This readPixels call gives me green values in steps of 257: a bunch of 0s, then a bunch of 257s, then a bunch of 514s...
let pix = new Uint16Array(w*h*4);
gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_SHORT, pix);
And this one gives me all 0s:
let pix = new Uint16Array(w*h*4);
gl.readPixels(0, 0, w, h, gl.RGBA_INTEGER, gl.UNSIGNED_SHORT, pix);
// image data
var w = 64;
var h = 64;
var size = w * h * 4;
var img = new Uint16Array(size); // need Uint16Array
for (var i = 0; i < img.length; i += 4) {
  img[i + 0] = 0;     // r
  img[i + 1] = i;     // g
  img[i + 2] = 0;     // b
  img[i + 3] = 65535; // a
}
// program
var canvas = document.getElementById('cv');
var gl = canvas.getContext('webgl2');
var program = gl.createProgram();
//var color_buffer_float_16ui = gl.getExtension('EXT_color_buffer_float'); // add for 16-bit
// texture
var tex = gl.createTexture(); // create empty texture
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(
  gl.TEXTURE_2D,     // target
  0,                 // mip level
  gl.RGBA16UI,       // internal format -> gl.RGBA16UI
  w, h,              // width and height
  0,                 // border
  gl.RGBA_INTEGER,   // format -> gl.RGBA_INTEGER
  gl.UNSIGNED_SHORT, // type -> gl.UNSIGNED_SHORT
  img                // texture data
);
console.log("ORIGINAL IMAGE:");
console.log(img);
// buffer
var buffer = gl.createBuffer();
var bufferData = new Float32Array([
  -1, -1,
   1, -1,
   1,  1,
   1,  1,
  -1,  1,
  -1, -1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, bufferData, gl.STATIC_DRAW);
// shaders
program.vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(program.vs, `#version 300 es
  in vec4 vertex;           // incoming vertex position
  out vec2 pixelCoordinate; // passes the position to the fragment shader
  void main() {
    gl_Position = vertex;                    // pass position through unchanged
    pixelCoordinate = vertex.xy * 0.5 + 0.5; // map clip space [-1,1] to texture coords [0,1]
  }
`);
program.fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(program.fs, `#version 300 es
  precision highp float;
  uniform highp usampler2D tex; // unsigned integer sampler, required for RGBA16UI
  in vec2 pixelCoordinate;      // pixel position from the vertex shader
  out vec4 fooColor;
  void main() {
    uvec4 unsignedIntValues = texture(tex, pixelCoordinate);
    vec4 floatValues0To65535 = vec4(unsignedIntValues);
    vec4 colorValues0To1 = floatValues0To65535 / 65535.0;
    fooColor = colorValues0To1;
  }
`);
gl.compileShader(program.vs);
checkCompileError(program.vs);
gl.compileShader(program.fs);
checkCompileError(program.fs);
function checkCompileError(s) {
  if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
    console.error(gl.getShaderInfoLog(s));
  }
}
gl.attachShader(program, program.vs);
gl.attachShader(program, program.fs);
gl.deleteShader(program.vs);
gl.deleteShader(program.fs);
// program
gl.bindAttribLocation(program, 0, "vertex");
gl.linkProgram(program);
gl.useProgram(program);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6); // execute program
let pix = new Uint16Array(size);
gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_SHORT, pix);
console.log("READPIXELS:");
console.log(pix);
<canvas id="cv" width=100 height=100></canvas>
Upvotes: 0
Views: 911
Reputation: 396
In that jsfiddle you're drawing to the default framebuffer and reading back from that. The default framebuffer is RGBA8, so each 16-bit channel value is quantized to 8 bits on write and re-expanded on read (65535 / 255 = 257), which is why your readback only changes in steps of 257.
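Roughly, the round trip looks like this (a sketch of the conversion math, with a made-up function name; assuming round-to-nearest conversion, which implementations typically use):
function roundTrip16ViaRgba8(v16) {
  var v8 = Math.round(v16 / 257); // 16-bit value quantized to 8 bits in the RGBA8 framebuffer
  return v8 * 257;                // re-expanded to 16 bits by readPixels
}
console.log(roundTrip16ViaRgba8(300)); // 257 -- the original 300 is unrecoverable
console.log(roundTrip16ViaRgba8(100)); // 0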
I'm sort of surprised that gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_SHORT, pix); is allowed on your system! Usually only RGBA+UNSIGNED_BYTE is accepted, plus one extra implementation-chosen format/type pair.
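You can query which extra pair your implementation chose for the currently bound framebuffer (these are standard WebGL parameters, shown against the question's gl context):
var readFormat = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_FORMAT);
var readType = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_TYPE);
console.log(readFormat, readType); // GLenum values for the implementation-chosen pair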
If you attach the RGBA16UI texture to a framebuffer and call readPixels on it directly, you can get back the original data untruncated: https://jsfiddle.net/3u2tvqe6/
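A minimal sketch of that approach, reusing tex, w, h, and size from the question's code (my own condensation, not the exact code from the fiddle):
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
// read straight from the integer texture -- no draw call or 0-1 conversion involved
let pix = new Uint16Array(size);
gl.readPixels(0, 0, w, h, gl.RGBA_INTEGER, gl.UNSIGNED_SHORT, pix);
gl.bindFramebuffer(gl.FRAMEBUFFER, null); // restore the default framebuffer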
Worth noting that WebGL 2 doesn't guarantee that you can use RGBA_INTEGER+UNSIGNED_SHORT with RGBA16UI. For unsigned-integer-type textures, the only guaranteed readPixels combination is RGBA_INTEGER+UNSIGNED_INT! This is specified on pages 191 and 193 of the GLES 3.0 spec: https://www.khronos.org/registry/OpenGL/specs/es/3.0/es_spec_3.0.pdf
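So the fully portable readback reads into a Uint32Array instead (a sketch under the same framebuffer setup as above; the values still fit in 16 bits):
let pix32 = new Uint32Array(size);
gl.readPixels(0, 0, w, h, gl.RGBA_INTEGER, gl.UNSIGNED_INT, pix32);
let pix16 = Uint16Array.from(pix32); // optional downcast; each value is still 0..65535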
Upvotes: 3