Reputation: 4388
I am using Camera in my app to take pictures of ID cards, I have a rectangular overlay to which image will be cropped. issue is that the image quality is reduced once the image is captured.
I am unable to figure out where exactly it is happening. In cutImage method, I am cutting the image but I don't think I am doing anything to the resolution of the image there.
Can any one suggest where the quality might be going down.
takePicture is called when the user clicks to take the picture. Once the picture is taken there is a button 'use picture' that is when usePicture is called.
cutImage method is used to crop the image based on the preview.
any suggestions on how to stop the resolution from going down will be very very helpful
protected void takePicture() {
Log.e(TAG, "takePicture started");
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
try {
ImageReader reader = ImageReader.newInstance(textureViewWidth, textureViewHeight, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
takenPictureBytes = bytes;
Log.d(TAG, "takenPictureBytes length - " + takenPictureBytes.length);
} catch (Exception e) {
Log.d(TAG, " onImageAvailable exception ");
e.printStackTrace();
} finally {
if (image != null) {
Log.d(TAG, " image closing");
image.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Log.d(TAG, "takePicture - camera capture session");
switchPanels(true);
progress.setVisibility(View.GONE);
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.d(TAG, "takePicture - onConfigured- camera access exception ");
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "takePicture - onConfigureFailed");
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.d(TAG, "takePicture - CameraAccessException ");
e.printStackTrace();
}
}
private void usePicture() {
Log.d(TAG, "usePicture - started ");
if(null != takenPictureBytes ){
try{
String imagePath = null;
Bitmap bitmap = BitmapFactory.decodeByteArray(takenPictureBytes, 0, takenPictureBytes.length);
int bitmapByteCountUsePic = byteSizeOf(bitmap);
Matrix matrix = new Matrix();
matrix.postRotate(90);
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
if (isFrameMode) {
float withRatio = (float) rotatedBitmap.getWidth() / (float) textureViewWidth;
float heightRatio = (float) rotatedBitmap.getHeight() / (float) textureViewHeight;
Bitmap newImage = cutImage(rotatedBitmap, (int) (photoFrameView.getWidth() * withRatio), (int) (photoFrameView.getHeight() * heightRatio), withRatio);
int bitmapByteCountNewImage = byteSizeOf(newImage);
imagePath = saveBitmap(newImage);
} else {
imagePath = saveBitmap(rotatedBitmap);
}
TakePhotoFragment.TakePhotoFragmentEvent takePhotoFragmentEvent = new TakePhotoFragment.TakePhotoFragmentEvent();
takePhotoFragmentEvent.setImagePath(imagePath);
// send rxjava
//pop backstack
RxBus.getInstance().post(takePhotoFragmentEvent);
getActivity().getSupportFragmentManager().popBackStack();
}catch (Exception e){
Log.d(TAG, "usePicture - exception ");
e.printStackTrace();
}
}else{
Log.d(TAG, "usePicture - takenPictureBytes is null");
DialogUtil.showErrorSnackBar(getView(), R.string.retake_photo );
}
}
public Bitmap cutImage(final Bitmap bitmap, final int pixepWidth, final int pixelsHeight, float widthRatio) {
int bitmapByteCountCutImage = byteSizeOf(bitmap);
Bitmap output = createBitmap(pixepWidth, pixelsHeight, Bitmap.Config.ARGB_8888);
Bitmap original = bitmap;
final Paint paint = new Paint();
Canvas canvas = new Canvas(output);
int padding = (int) ((float) getResources().getDimensionPixelSize(R.dimen.double_padding) * widthRatio);
Rect rect = new Rect(padding, (original.getHeight() - pixelsHeight) / 2, padding + pixepWidth, original.getHeight() - (original.getHeight() - pixelsHeight) / 2);
final RectF cutedRect = new RectF(0, 0, pixepWidth, pixelsHeight);
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
canvas.drawBitmap(original, rect, cutedRect, paint);
return output;
}
private String saveBitmap(Bitmap bitmap) {
File pictureFileDir = getDir();
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Toast.makeText(getActivity(), "Can't create directory to save image.", Toast.LENGTH_LONG).show();
return null;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyymmddhhmmssSSS");
String date = dateFormat.format(new Date());
String photoFile = "Picture_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File pictureFile = new File(filename);
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(stream.toByteArray());
fos.close();
return pictureFile.getAbsolutePath();
} catch (Exception error) {
Log.d(TAG, "File" + filename + "not saved: " + error.getMessage());
}
return null;
}
Upvotes: 0
Views: 1090
Reputation: 1698
You are changing the bitmap size/resolution in this code:
float withRatio = (float) rotatedBitmap.getWidth() / (float) textureViewWidth;
float heightRatio = (float) rotatedBitmap.getHeight() / (float) textureViewHeight;
Bitmap newImage = cutImage(rotatedBitmap, (int) (photoFrameView.getWidth() * withRatio), (int) (photoFrameView.getHeight() * heightRatio), withRatio);
int bitmapByteCountNewImage = byteSizeOf(newImage);
imagePath = saveBitmap(newImage);
Put in a breakpoint and see what the new heightRatio and widthRatio are, and what the photoFrameView.getWidth() * withRatio value comes out to. I think you will find it is small compared to the original image. I'm not sure why you are calculating the Ratios with the textureViewWidth/Height, you shouldn't have to do that. Whatever you are displaying the image in should be able to 'Fill' without having to change the size of the underlying bitmap, and thus losing resolution.
You might check out this method:
rawBitmap = ((BitmapDrawable)imageToLoad.getDrawable()).getBitmap();
theBitmap = Bitmap.createScaledBitmap(rawBitmap, 285, 313, false);
Upvotes: 0