Reputation: 2343
Brief: when I pass two textures with two different sets of texture coordinates to one fragment shader, I see only the first texture. But when I use the same texture coordinates for both textures, it works fine and I can see both.
I work with photo filters and use OpenGL ES 2.0 to implement them. Some filters use an additional texture: the first texture is the photo and the second is a tracery (overlay).
Here is my vertex shader:
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
attribute vec4 inputTextureCoordinate2;

varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;

void main() {
    gl_Position = position;
    textureCoordinate = inputTextureCoordinate.xy;
    textureCoordinate2 = inputTextureCoordinate2.xy;
}
Here is my fragment shader:
precision mediump float;

uniform sampler2D inputImageTexture1;
uniform sampler2D inputImageTexture2;

varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;

void main() {
    mediump vec4 color1 = texture2D(inputImageTexture1, textureCoordinate);
    mediump vec4 color2 = texture2D(inputImageTexture2, textureCoordinate2);
    mediump vec3 colorResult = mix(color1.rgb, color2.rgb, 0.5);
    gl_FragColor = vec4(colorResult, 1.0);
}
In my code I use a GLSurfaceView.Renderer implementation.
Initialization of coordinates:
// Full-screen quad as a triangle strip, plus two sets of texture coordinates
// (COORDINATES2 is COORDINATES1 flipped vertically).
static final float CUBE[] = {-1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, -1.0f,};
public static final float COORDINATES1[] = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f,};
public static final float COORDINATES2[] = {0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f,};
...
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);

mGLTextureCoordinates1 = ByteBuffer.allocateDirect(COORDINATES1.length * 4).order(ByteOrder.nativeOrder())
        .asFloatBuffer();
mGLTextureCoordinates1.clear();
mGLTextureCoordinates1.put(COORDINATES1).position(0);

mGLTextureCoordinates2 = ByteBuffer.allocateDirect(COORDINATES2.length * 4).order(ByteOrder.nativeOrder())
        .asFloatBuffer();
mGLTextureCoordinates2.clear();
mGLTextureCoordinates2.put(COORDINATES2).position(0);
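For reference, this direct-buffer boilerplate can be factored into a small helper, which also avoids copy-paste mistakes between the two coordinate buffers. This is just a sketch; the name toFloatBuffer is made up:

// Sketch: wrap a float[] in a native-order direct FloatBuffer ready for GL.
private static FloatBuffer toFloatBuffer(float[] data) {
    FloatBuffer buffer = ByteBuffer.allocateDirect(data.length * 4) // 4 bytes per float
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer();
    buffer.put(data).position(0); // fill, then rewind for glVertexAttribPointer
    return buffer;
}

With it, the setup reduces to mGLTextureCoordinates1 = toFloatBuffer(COORDINATES1); and so on.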
The onSurfaceCreated method:
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    GLES20.glClearColor(0, 0, 0, 1);
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);

    String vertexShader = RawResourceReader.readTextFileFromRawResource(mContext, R.raw.test_vertex);
    String fragmentShader = RawResourceReader.readTextFileFromRawResource(mContext, R.raw.test_fragment);
    mGLProgId = loadProgram(vertexShader, fragmentShader);

    mGLAttribPosition = GLES20.glGetAttribLocation(mGLProgId, "position");
    mGLAttribTextureCoordinate = GLES20.glGetAttribLocation(mGLProgId, "inputTextureCoordinate");
    mGLAttribTextureCoordinate2 = GLES20.glGetAttribLocation(mGLProgId, "inputTextureCoordinate2");
    mGLUniformTexture1 = GLES20.glGetUniformLocation(mGLProgId, "inputImageTexture1");
    mGLUniformTexture2 = GLES20.glGetUniformLocation(mGLProgId, "inputImageTexture2");

    mTexture1 = loadTexture(mContext, R.drawable.photo);
    mTexture2 = loadTexture(mContext, R.drawable.formula1);
}
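loadProgram itself is not shown here; a minimal version, assuming only basic compile/link error checks, looks roughly like this:

// Sketch of loadProgram: compile both shaders and link them into a program.
private static int loadProgram(String vertexSource, String fragmentSource) {
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);

    int program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);

    int[] linked = new int[1];
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linked, 0);
    if (linked[0] == 0) {
        throw new RuntimeException("Link failed: " + GLES20.glGetProgramInfoLog(program));
    }
    return program;
}

private static int loadShader(int type, String source) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);

    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        throw new RuntimeException("Compile failed: " + GLES20.glGetShaderInfoLog(shader));
    }
    return shader;
}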
The onDrawFrame method:
@Override
public void onDrawFrame(GL10 gl) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    GLES20.glUseProgram(mGLProgId);

    mGLCubeBuffer.position(0);
    GLES20.glVertexAttribPointer(mGLAttribPosition, 2, GLES20.GL_FLOAT, false, 0, mGLCubeBuffer);
    GLES20.glEnableVertexAttribArray(mGLAttribPosition);

    // set first coordinates
    mGLTextureCoordinates1.position(0);
    GLES20.glVertexAttribPointer(mGLAttribTextureCoordinate, 2, GLES20.GL_FLOAT, false, 0, mGLTextureCoordinates1);
    GLES20.glEnableVertexAttribArray(mGLAttribTextureCoordinate);

    // set second coordinates
    mGLTextureCoordinates2.position(0);
    GLES20.glVertexAttribPointer(mGLAttribTextureCoordinate2, 2, GLES20.GL_FLOAT, false, 0, mGLTextureCoordinates2);
    GLES20.glEnableVertexAttribArray(mGLAttribTextureCoordinate2);

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexture1);
    GLES20.glUniform1i(mGLUniformTexture1, 0);

    GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexture2);
    GLES20.glUniform1i(mGLUniformTexture2, 1);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    GLES20.glDisableVertexAttribArray(mGLAttribPosition);
    GLES20.glDisableVertexAttribArray(mGLAttribTextureCoordinate);
    GLES20.glDisableVertexAttribArray(mGLAttribTextureCoordinate2);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
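When only one texture shows up, it is worth verifying that both attribute lookups succeeded and that the draw raises no GL error. A tiny debug helper (my own sketch; assumes android.util.Log), called at the end of onDrawFrame, could be:

// Sketch: verify attribute lookups and surface GL errors while debugging.
private void checkSetup() {
    if (mGLAttribTextureCoordinate2 == -1) {
        // -1 means the attribute was not found (misspelled, or optimized out
        // because the shader never actually uses it).
        Log.e("Renderer", "inputTextureCoordinate2 not found in program");
    }
    int error = GLES20.glGetError();
    if (error != GLES20.GL_NO_ERROR) {
        Log.e("Renderer", "GL error: 0x" + Integer.toHexString(error));
    }
}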
The significant part of the loadTexture method:
GLES20.glGenTextures(1, textureHandle, 0);

// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);

// Set filtering and wrapping
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

// Enable standard alpha blending
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_BLEND);

// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
Note that on iOS this works fine, but a library is used there. I tried the jp.co.cyberagent.android.gpuimage library, but it has a few bugs and doesn't handle this case properly.
I want to know how to solve this problem. It may be some setting I don't know about, or something else. I'm new to OpenGL and hope for your help.
Upvotes: 2
Views: 1897
Reputation: 2832
You can't use GLUtils.texImage2D() to load alpha textures on Android. This is a common problem that Google really should document better. The problem is that the Bitmap class converts all images into pre-multiplied format, but that does not work with OpenGL ES unless the images are completely opaque. The best solution is to use native code. This article gives more detail on this:
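If native code is not an option, one Java-side workaround (my sketch, not the article's approach; inPremultiplied requires API 19+) is to decode the bitmap without premultiplication and upload the raw RGBA bytes yourself instead of going through GLUtils.texImage2D():

// Sketch (API 19+): decode with straight (non-premultiplied) alpha,
// then upload the raw RGBA bytes directly. `context` and `resId` stand
// in for however you load the resource.
public static int loadStraightAlphaTexture(Context context, int resId) {
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inScaled = false;          // keep original pixel dimensions
    opts.inPremultiplied = false;   // keep straight alpha (API 19+)
    Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resId, opts);

    ByteBuffer pixels = ByteBuffer.allocateDirect(bitmap.getByteCount())
            .order(ByteOrder.nativeOrder());
    bitmap.copyPixelsToBuffer(pixels);  // ARGB_8888 is RGBA byte order in memory
    pixels.position(0);

    int[] handle = new int[1];
    GLES20.glGenTextures(1, handle, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, handle[0]);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
            bitmap.getWidth(), bitmap.getHeight(), 0,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    bitmap.recycle();
    return handle[0];
}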
Upvotes: 2