Laurynas

Reputation: 175

CameraX issue trying to capture image in memory

With the new CameraX API, I am trying to take a picture in memory using the public void takePicture(final OnImageCapturedListener listener) method and then convert the resulting image to an OpenCV Mat.

While I am able to convert the image to a Mat successfully in the image analyzer, there is a problem when I try to capture the image in high quality: getPlanes() returns an array with only one item (whereas in the image analyzer I get three plane items), and the resulting image appears to be broken:

(screenshot of the broken capture: Imgur)
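For comparison, a small diagnostic like the one below can be dropped into both callbacks to see what each use case actually delivers. This is only a sketch; the helper and its log tag are not part of the original code (the activity below already imports android.media.Image):

    // Sketch of a diagnostic helper: logs format, plane count and size of an
    // android.media.Image so the ImageAnalysis and ImageCapture outputs can be
    // compared side by side (uses android.util.Log).
    private static void logImageInfo(String tag, Image image) {
        android.util.Log.d(tag, "format=" + image.getFormat()
                + " planes=" + image.getPlanes().length
                + " size=" + image.getWidth() + "x" + image.getHeight());
    }

The full activity code: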

package com.example.scanner;

import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.media.Image;
import android.os.Bundle;
import android.util.Rational;
import android.util.Size;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.Toast;

import org.opencv.core.CvType;
import org.opencv.core.Mat;

import java.nio.ByteBuffer;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageAnalysisConfig;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;

public class CameraXActivity extends AppCompatActivity {

    private final String[] REQUIRED_PERMISSIONS = new String[]{"android.permission.CAMERA", "android.permission.WRITE_EXTERNAL_STORAGE"};
    //array w/ permissions from manifest
    TextureView mSurfaceView;
    private int REQUEST_CODE_PERMISSIONS = 10; //arbitrary number, can be changed accordingly

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_gallery);

        mSurfaceView = findViewById(R.id.action_sync);

        if (allPermissionsGranted()) {
            startCamera(); //start camera if permission has been granted by user
        } else {
            ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
        }
    }

    private void startCamera() {
        androidx.camera.core.CameraX.unbindAll();

        /* start preview */
        int aspRatioW = mSurfaceView.getWidth(); // get width of screen
        int aspRatioH = mSurfaceView.getHeight(); // get height
        Rational asp = new Rational(aspRatioW, aspRatioH); // aspect ratio
        Size screen = new Size(aspRatioW, aspRatioH); // size of the screen

        PreviewConfig pConfig = new PreviewConfig.Builder()
                .setTargetAspectRatio(asp)
                .setTargetResolution(screen)
                .setLensFacing(androidx.camera.core.CameraX.LensFacing.BACK)
                .build();
        Preview preview = new Preview(pConfig); //lets build it


        ImageAnalysisConfig imgAConfig = new ImageAnalysisConfig.Builder()
                .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
                .setLensFacing(CameraX.LensFacing.BACK)
                .setTargetResolution(new android.util.Size(2480, 3508))
                .build();
        ImageAnalysis analysis = new ImageAnalysis(imgAConfig);

        analysis.setAnalyzer(
                new ImageAnalysis.Analyzer() {
                    @Override
                    public void analyze(ImageProxy image, int rotationDegrees) {

                        Mat mat = imageToMat(image.getImage()); // no errors here!

                    }
                });

        preview.setOnPreviewOutputUpdateListener(
                new Preview.OnPreviewOutputUpdateListener() {
                    //to update the surface texture we have to destroy it first, then re-add it
                    @Override
                    public void onUpdated(Preview.PreviewOutput output) {
                        ViewGroup parent = (ViewGroup) mSurfaceView.getParent();
                        parent.removeView(mSurfaceView);
                        parent.addView(mSurfaceView, 0);

                        mSurfaceView.setSurfaceTexture(output.getSurfaceTexture());
                    }
                });

        ImageCaptureConfig imgCapConfig =
                new ImageCaptureConfig.Builder()
                        .setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY)
                        .setLensFacing(CameraX.LensFacing.BACK)
                        .setTargetResolution(new android.util.Size(2480, 3508))
                        .build();
        final ImageCapture imgCap = new ImageCapture(imgCapConfig);

        // call after 5 seconds of starting
        new Thread(() -> {
            try {
                Thread.sleep(5000);
                imgCap.takePicture(new ImageCapture.OnImageCapturedListener() {
                    @Override
                    public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {

                        Mat mat = imageToMat(image.getImage()); // ERROR HERE!

                    }

                    @Override
                    public void onError(ImageCapture.UseCaseError useCaseError, String message, @Nullable Throwable cause) {
                        // Error
                    }
                });
            } catch (Exception e) {
                System.err.println(e);
            }
        }).start();
        //bind to lifecycle:
        androidx.camera.core.CameraX.bindToLifecycle((LifecycleOwner) this, analysis, imgCap, preview);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        //start camera when permissions have been granted otherwise exit app
        if (requestCode == REQUEST_CODE_PERMISSIONS) {
            if (allPermissionsGranted()) {
                startCamera();
            } else {
                Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT).show();
                finish();
            }
        }
    }

    private boolean allPermissionsGranted() {
        //check if req permissions have been granted
        for (String permission : REQUIRED_PERMISSIONS) {
            if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
                return false;
            }
        }
        return true;
    }


    public static Mat imageToMat(Image image) {
        ByteBuffer buffer;
        int rowStride;
        int pixelStride;
        int width = image.getWidth();
        int height = image.getHeight();
        int offset = 0;

        Image.Plane[] planes = image.getPlanes();
        byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];

        for (int i = 0; i < planes.length; i++) {
            buffer = planes[i].getBuffer();
            rowStride = planes[i].getRowStride();
            pixelStride = planes[i].getPixelStride();
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
                if (pixelStride == bytesPerPixel) {
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);

                    if (h - row != 1) {
                        buffer.position(buffer.position() + rowStride - length);
                    }
                    offset += length;
                } else {


                    if (h - row == 1) {
                        buffer.get(rowData, 0, width - pixelStride + 1);
                    } else {
                        buffer.get(rowData, 0, rowStride);
                    }

                    for (int col = 0; col < w; col++) {
                        data[offset++] = rowData[col * pixelStride];
                    }
                }
            }
        }

        Mat mat = new Mat(height + height / 2, width, CvType.CV_8UC1);
        mat.put(0, 0, data);

        return mat;
    }
}
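For completeness, the single-channel Mat produced by imageToMat holds the Y, U and V planes stacked vertically (an I420-style layout), so it would normally be converted to BGR before further OpenCV processing. A minimal sketch of what could follow the imageToMat call inside the analyzer callback, assuming the planes really are packed in that order (Y, then U, then V):

    // Sketch, inside analyze(): convert the stacked YUV Mat to BGR
    // (requires org.opencv.imgproc.Imgproc).
    Mat yuv = imageToMat(image.getImage());
    Mat bgr = new Mat();
    Imgproc.cvtColor(yuv, bgr, Imgproc.COLOR_YUV2BGR_I420);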

I tried using both 1.0.0-alpha01 and the latest (1.0.0-alpha03) version of CameraX. Initially, I thought the issue was with the YUV-to-Mat conversion or the image being too large, but that was not the case.

The phone used is a Huawei P20 Pro.

Upvotes: 4

Views: 6435

Answers (1)

Laurynas

Reputation: 175

Found the issue that was causing this. The captured image is not in YUV format but in JPEG, as mentioned in the documentation.

To convert the JPEG to a Mat, the following code can be used:

imgCap.takePicture(new ImageCapture.OnImageCapturedListener() {
    @Override
    public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
        // The JPEG data sits in the single plane; copy it into a byte array
        ByteBuffer bb = image.getPlanes()[0].getBuffer();
        byte[] buf = new byte[bb.remaining()];
        bb.get(buf);

        // Decode the JPEG bytes into a Mat
        // (requires org.opencv.core.MatOfByte and org.opencv.imgcodecs.Imgcodecs)
        Mat mat = Imgcodecs.imdecode(new MatOfByte(buf), Imgcodecs.IMREAD_UNCHANGED);

        // Do something with Mat...

        image.close();
    }

    @Override
    public void onError(
            ImageCapture.UseCaseError error, String message, @Nullable Throwable cause) {
        // silently ignore error
    }
});
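Note that the decoded Mat keeps the sensor orientation, so depending on the device it may still need to be rotated by rotationDegrees. A minimal sketch using OpenCV's Core.rotate (the mapping below is an assumption about how rotationDegrees should be applied, not part of the original answer):

    // Sketch, inside onCaptureSuccess(): rotate the decoded Mat to match the
    // reported rotation (requires org.opencv.core.Core).
    switch (rotationDegrees) {
        case 90:  Core.rotate(mat, mat, Core.ROTATE_90_CLOCKWISE); break;
        case 180: Core.rotate(mat, mat, Core.ROTATE_180); break;
        case 270: Core.rotate(mat, mat, Core.ROTATE_90_COUNTERCLOCKWISE); break;
        default:  break; // 0 degrees, nothing to do
    }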

Upvotes: 4
