nick

Reputation: 196

Adding a watermark to a video

I created an app which records 10 seconds of video from the camera, without sound. This is the relevant part of the code:

...
MediaCodec mMediaCodec = MediaCodec.createEncoderByType("video/avc");
mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface mSurface = mMediaCodec.createInputSurface();
EGLDisplay mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
int[] e = new int[2];
EGL14.eglInitialize(mEGLDisplay, e, 0, e, 1);
EGLConfig[] mEGLConfig = new EGLConfig[1];
// 12610 is EGL_RECORDABLE_ANDROID (0x3142), needed for encoder input surfaces
EGL14.eglChooseConfig(mEGLDisplay, new int[]{
        EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8,
        EGL14.EGL_ALPHA_SIZE, 8, EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
        12610, 1, EGL14.EGL_NONE}, 0, mEGLConfig, 0, 1, new int[1], 0);
EGLContext mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig[0], EGL14.EGL_NO_CONTEXT,
        new int[]{EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE}, 0);
EGLSurface mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig[0], mSurface,
        new int[]{EGL14.EGL_NONE}, 0);
mMediaCodec.start();
MediaMuxer mMediaMuxer = new MediaMuxer(new File(Environment.getExternalStorageDirectory(), "ipcamera.mp4").getPath(), OutputFormat.MUXER_OUTPUT_MPEG_4);
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
FloatBuffer mFloatBuffer = ByteBuffer.allocateDirect(80).order(ByteOrder.nativeOrder()).asFloatBuffer();
mFloatBuffer.put(new float[]{-1, -1, 0, 0, 0, 1, -1, 0, 1, 0, -1, 1, 0, 0, 1, 1, 1, 0, 1, 1}).position(0);
float[] sm1 = new float[16], sm2 = new float[16];
Matrix.setIdentityM(sm1, 0);
int program = GLES20.glCreateProgram();
int f = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
int[] params = new int[1];
GLES20.glShaderSource(f,
        "uniform mat4 uMVPMatrix;\n" +
        "uniform mat4 uSTMatrix;\n" +
        "attribute vec4 aPosition;\n" +
        "attribute vec4 aTextureCoord;\n" +
        "varying vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "    gl_Position = uMVPMatrix * aPosition;\n" +
        "    vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
        "}\n");
GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0);
GLES20.glAttachShader(program, f);
f = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(f,
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "}\n");
GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0);
GLES20.glAttachShader(program, f);
GLES20.glLinkProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, params, 0);
if (params[0] != GLES20.GL_TRUE) GLES20.glDeleteProgram(program);
int maPositionHandle = GLES20.glGetAttribLocation(program, "aPosition");
int maTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord");
int muMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix");
int muSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix");
int[] texName = new int[1];
GLES20.glGenTextures(1, texName, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
(mSurfaceTexture = new SurfaceTexture(texName[0])).setOnFrameAvailableListener(this);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
long a = System.currentTimeMillis(); // recording start time
BufferInfo mBufferInfo = new BufferInfo();
boolean b = true;                    // true while still recording
int c, d = 0;                        // c = output buffer index, d = muxer track index
do {
    synchronized (VideoRecording.this.b) {
        if (!VideoRecording.this.b) continue; else VideoRecording.this.b = false;
    }
    mSurfaceTexture.updateTexImage();
    mSurfaceTexture.getTransformMatrix(sm1);
    GLES20.glClearColor(0, 0, 0, 1);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glUseProgram(program);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
    mFloatBuffer.position(0);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    mFloatBuffer.position(3);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    Matrix.setIdentityM(sm2, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, sm2, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, sm1, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    if (!(b = System.currentTimeMillis() - a < 10000)) mMediaCodec.signalEndOfInputStream();
    while ((c = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000)) != MediaCodec.INFO_TRY_AGAIN_LATER || !b) {
        if (c == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            d = mMediaMuxer.addTrack(mMediaCodec.getOutputFormat());
            mMediaMuxer.start();
        } else if (c >= 0) {
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                mBufferInfo.size = 0;
            } else {
                ByteBuffer buffer = mMediaCodec.getOutputBuffers()[c];
                buffer.position(mBufferInfo.offset);
                buffer.limit(mBufferInfo.offset + mBufferInfo.size);
                mMediaMuxer.writeSampleData(d, buffer, mBufferInfo);
            }
            mMediaCodec.releaseOutputBuffer(c, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
        }
    }
} while (b);
mMediaCodec.stop();
mMediaCodec.release();
mMediaMuxer.stop();    // without stopping the muxer the file is never finalized
mMediaMuxer.release();
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
...
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (VideoRecording.this.b) {
        VideoRecording.this.b = true;
    }
}

Practically, I took all the code from the well-known CameraToMpegTest.java and tried to make it as simple as possible by collapsing a lot of it into one block (shown above). I have been programming in Java for 3 years, but this is my first time using Android's OpenGL libraries. I have already read a lot of tutorials on this topic, but found very little information about recording video via MediaMuxer and the built-in OpenGL libraries; only the Grafika project contains something useful. How can I add a watermark (for example R.mipmap.ic_launcher) to the video at specified coordinates? I found very little about this on the internet; I saw this code on some forum:

Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_launcher);

//Generate one texture pointer...
gl.glGenTextures(1, textures, 0);

//...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

//Create Nearest Filtered Texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);

//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);

//Clean up
bitmap.recycle();

But I really have no idea where to put this code in my setup. I tried putting it practically everywhere, but either nothing happened or my video came out damaged. In one Stack Overflow question (I lost the link) programmers asserted that this requires two GLES20 programs. Please show me the correct code for adding a watermark to the video and where it goes in my setup. Maybe it is even possible to do this without OpenGL, using only MediaMuxer and MediaCodec?
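
From what I understood, a second overlay pass would look roughly like this, drawn after the camera-frame quad and immediately before eglSwapBuffers(). This is just my guess: overlayProgram would be a second program with an ordinary sampler2D fragment shader, and watermarkTex the texture created by the snippet above; both names are placeholders, not variables from my code:

// My guess (untested): a second draw pass blended over the camera frame.
// overlayProgram and watermarkTex are placeholder names.
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glUseProgram(overlayProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, watermarkTex[0]);
// set up position/texcoord attributes for a small quad at the watermark location here
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisable(GLES20.GL_BLEND);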


Please don't offer libraries that are not built in, like FFmpeg; I must do this with built-in Android libraries. The minimum API level for my app must be 18 (Android 4.3.1).


@NizaSiwale, this is what I am getting:

(screenshot: the current output, without a watermark)

but I want this:

(screenshot: the desired output, with a watermark overlaid)

Upvotes: 0

Views: 1747

Answers (1)

Niza Siwale

Reputation: 2405

You can simply use MediaCodec and MediaMuxer: grab each camera frame as a bitmap, draw the watermark onto it, and feed the result to the encoder.

First, add a preview callback to your camera and draw your watermark onto each frame:

private byte[] currentFrame;

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Size previewSize = camera.getParameters().getPreviewSize();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    // Decode the NV21 preview buffer to JPEG, then to a Bitmap
    YuvImage yuv = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
    yuv.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), YOUR_JPEG_COMPRESSION, baos);
    byte[] rawImage = baos.toByteArray();

    Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length);
    currentFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), mark(bitmap, yourWatermark, watermarkLocation));
}

public Bitmap mark(Bitmap src, Bitmap watermark, Point location) {
    int w = src.getWidth();
    int h = src.getHeight();
    Bitmap result = Bitmap.createBitmap(w, h, src.getConfig());

    Canvas canvas = new Canvas(result);
    canvas.drawBitmap(src, 0, 0, null);
    canvas.drawBitmap(watermark, location.x, location.y, null);

    return result;
}

byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
    int[] argb = new int[inputWidth * inputHeight];

    scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);

    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
    encodeYUV420SP(yuv, argb, inputWidth, inputHeight);

    scaled.recycle();

    return yuv;
}

void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;

    int yIndex = 0;
    int uvIndex = frameSize;

    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {

            a = (argb[index] & 0xff000000) >> 24; // alpha is not used
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff);

            // well-known RGB to YUV conversion
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            // NV21 has a plane of Y and an interleaved VU plane; each chroma
            // sample covers a 2x2 block of pixels (every other pixel on
            // every other scanline)
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
            }

            index++;
        }
    }
}
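
The callback itself still has to be registered on the camera; with the legacy android.hardware.Camera API (which the question uses), that would be something along these lines, assuming mCamera is the opened camera and the enclosing class implements Camera.PreviewCallback:

// Assumption: mCamera is the open android.hardware.Camera instance and
// "this" implements Camera.PreviewCallback (the onPreviewFrame above)
mCamera.setPreviewCallback(this);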

After that, simply use MediaCodec and MediaMuxer to create a video from the frames (bitmaps):

private void prepareEncoder() {
    try {
        mBufferInfo = new MediaCodec.BufferInfo();

        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.LOLLIPOP) {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        } else {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        }
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

        // audio encoder is configured here but never fed in this snippet
        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, 1);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);

        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();

        mediaCodecForAudio = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
        mediaCodecForAudio.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodecForAudio.start();

        try {
            String outputPath = new File(Environment.getExternalStorageDirectory(), "test.mp4").toString();
            mediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

private void bufferEncoder() {
    runnable = new Runnable() {
        @Override
        public void run() {
            prepareEncoder();
            try {
                while (mRunning) {
                    encode();
                }
                encode();
            } finally {
                release();
            }
        }
    };
    Thread thread = new Thread(runnable);
    thread.start();
}

public void encode() {
    while (true) {
        if (!mRunning) {
            break;
        }
        int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        long ptsUsec = computePresentationTime(generateIndex);
        if (inputBufIndex >= 0 && currentFrame != null) {
            byte[] input = currentFrame;
            // Note: getInputBuffer()/getOutputBuffer() require API 21;
            // on API 18-20 use getInputBuffers()[inputBufIndex] instead
            final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
            inputBuffer.clear();
            inputBuffer.put(input);
            mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
            generateIndex++;
            currentFrame = null;
        }
        int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            Log.d("CODEC", "no output from encoder available");
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // happens exactly once, before the first output buffer
            MediaFormat newFormat = mediaCodec.getOutputFormat();
            mTrackIndex = mediaMuxer.addTrack(newFormat);
            mediaMuxer.start();
        } else if (encoderStatus < 0) {
            Log.i("CODEC", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
        } else if (mBufferInfo.size != 0) {
            ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
            if (encodedData == null) {
                Log.i("CODEC", "encoderOutputBuffer " + encoderStatus + " was null");
            } else {
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                mediaCodec.releaseOutputBuffer(encoderStatus, false);
            }
        }
    }
}
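
The helpers computePresentationTime() and calcBitRate() are not defined in the snippet above; minimal sketches, assuming a fixed FRAME_RATE and the WIDTH/HEIGHT used to configure the format, could look like this:

// Sketch only: microsecond timestamps for a constant-rate stream, and a
// rough bitrate heuristic (~0.25 bits per pixel per frame). Both are
// assumptions, not code from the original answer.
private long computePresentationTime(long frameIndex) {
    return frameIndex * 1000000L / FRAME_RATE;
}

private int calcBitRate() {
    return (int) (0.25f * FRAME_RATE * WIDTH * HEIGHT);
}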

Once done, just stop and release the MediaCodec and MediaMuxer; stopping the muxer finalizes and saves your video:

private void release() {
    if (mediaCodec != null) {
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
        Log.i("CODEC", "RELEASE CODEC");
    }
    if (mediaMuxer != null) {
        mediaMuxer.stop();
        mediaMuxer.release();
        mediaMuxer = null;
        Log.i("CODEC", "RELEASE MUXER");
    }
}
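
Note that the encode() loop above never signals end-of-stream, so the muxer may stop before the encoder has flushed its last frames. Before calling release(), you would typically queue an empty input buffer flagged as end-of-stream and keep draining output until the flag comes back, roughly:

// Sketch only (same field names as above): tell the encoder the stream is
// over, then continue dequeuing output until BUFFER_FLAG_END_OF_STREAM
int inIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
if (inIndex >= 0) {
    mediaCodec.queueInputBuffer(inIndex, 0, 0,
            computePresentationTime(generateIndex), MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}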

Upvotes: 3
