Reputation: 1532
I want to detect features in an image with OpenCV using back projection.
For a start I would be happy to compute a histogram of a small, single-colored image and then apply it to a larger image. Then I can build more on top of that. There is an example in C++ and I would like to do something like this in Java. Sadly, the Java interface to OpenCV is not very well documented.
Below is the code I have so far, but it is not working (obviously, or I wouldn't be asking for help). It would be great if someone could help me get it working or point me to some good documentation for the Java API!
import java.util.ArrayList;

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;

public class ColorHistogramDetector extends ColorThresholdDetector {

    //private cvHistogram histogram;

    //histogram resolution for hue and saturation
    static final int hbins = 30;//, sbins = 32;

    public synchronized Mat detect(Mat inputFrame) {
        Mat calcFrame = new Mat();
        Imgproc.cvtColor(inputFrame, calcFrame, Imgproc.COLOR_RGB2HSV);

        Mat hue = calcFrame;
        ArrayList<Mat> dst = new ArrayList<Mat>();
        dst.add(hue);

        //create single color image
        Mat fillImg = new Mat(16, 16, CvType.CV_8UC3);
        fillImg.setTo(hsvColor);

        MatOfInt histSize = new MatOfInt(hbins, hbins);

        // hue varies from 0 to 179, see cvtColor
        // saturation varies from 0 (black-gray-white) to
        // 255 (pure spectrum color)
        MatOfFloat ranges = new MatOfFloat(0, 180, 0, 256);

        Mat hist = new Mat();
        // we compute the histogram from the 0-th and 1-st channels
        MatOfInt channels = new MatOfInt(0, 1);

        ArrayList<Mat> fillImgs = new ArrayList<Mat>();
        fillImgs.add(fillImg);
        Imgproc.calcHist(fillImgs, channels, new Mat(), hist, histSize, ranges);

        outputFrame = new Mat();
        Imgproc.calcBackProject(dst, channels, hist, calcFrame, ranges, 1);

        int w = inputFrame.cols(); int h = inputFrame.rows();
        int bin_w = (int) Math.round((double) w / hbins);
        Mat histImg = new Mat(w, h, CvType.CV_8UC3);
        for (int i = 0; i < hbins; i++) {
            Core.rectangle(histImg, new Point(i * bin_w, h),
                    new Point((i + 1) * bin_w,
                            h - Math.round(hist.get(0, i)[0] * h / 255.0)),
                    new Scalar(0, 0, 255), -1);
        }

        hist.release();
        fillImg.release();

        Imgproc.cvtColor(histImg, calcFrame, Imgproc.COLOR_RGB2HSV);
        return calcFrame;
    }
}
Upvotes: 26
Views: 6810
Reputation: 8904
Several things look odd in your code, so my advice is to first follow the tutorial closely and then adapt it to your use case.
You seem somewhat close to the tutorial you are following, but it looks like you are applying calcHist to a single-colored image. I do not see how that is useful; it should typically be an HSV image containing some object(s) instead. Also, you are missing the normalize step.
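In rough sketch form (assuming the OpenCV 2.4 Java API; here hsv stands for the HSV version of a frame that actually contains the object, and mask is an optional object mask, or new Mat() for none), the histogram step should look something like this; the full version is in histAndBackproj() below:

    // histogram over channels 0 (hue) and 1 (saturation)
    MatOfInt channels = new MatOfInt(0, 1);
    MatOfInt histSize = new MatOfInt(30, 32);
    MatOfFloat ranges = new MatOfFloat(0, 179, 0, 255);
    Mat hist = new Mat();
    Imgproc.calcHist(Arrays.asList(hsv), channels, mask, hist, histSize, ranges);
    // the step your code is missing:
    Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX, -1, new Mat());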
To help you with that, I have converted the C++ Back Projection Tutorial to OpenCV4Android 2.4.8.
Although you are using plain Java instead of Android, the API is exactly the same; only the boilerplate input/output handling will differ (a minimal desktop-Java sketch follows the code below).
Original C++ code can be found here.
I made some very minor changes to the original tutorial to make it more usable for Android.
I have not yet tested all the steps thoroughly but the end result looks ok.
Note: as it stands, you need to touch the screen once to initialize the Back Projection...
Here is most of it; what is missing you can find on GitHub:
private int outputWidth = 300;
private int outputHeight = 200;
private Mat mOutputROI;
private boolean bpUpdated = false;
private Mat mRgba;
private Mat mHSV;
private Mat mGray;
private Mat mIntermediateMat;
private Mat mask;
private Bitmap mBitmap;
private int lo = 20;
private int up = 20;
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC3);
    mHSV = new Mat();
    mIntermediateMat = new Mat();
    mGray = new Mat(height, width, CvType.CV_8UC1);
    mOutputROI = new Mat(outputHeight, outputWidth, CvType.CV_8UC1);
    mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat mCamera = inputFrame.rgba();
    Imgproc.cvtColor(mCamera, mRgba, Imgproc.COLOR_RGBA2RGB);

    Mat mOutputROI = mCamera.submat(0, outputHeight, 0, outputWidth);

    //Addition to remove some noise:
    Imgproc.GaussianBlur(mRgba, mRgba, new Size(5, 5), 0, Imgproc.BORDER_DEFAULT);

    Imgproc.cvtColor(mRgba, mHSV, Imgproc.COLOR_RGB2HSV_FULL);

    if (mask != null) {
        if (bpUpdated == false) {
            mGray = histAndBackproj();
        } else {
            bpUpdated = false;
        }
        Imgproc.resize(mGray, mIntermediateMat, mOutputROI.size(), 0, 0, Imgproc.INTER_LINEAR);
        Imgproc.cvtColor(mIntermediateMat, mOutputROI, Imgproc.COLOR_GRAY2BGRA);
    }

    return mCamera;
}
public boolean onTouch(View arg0, MotionEvent arg1) {
    Point seed = getImageCoordinates(mRgba, arg1.getX(), arg1.getY());

    int newMaskVal = 255;
    Scalar newVal = new Scalar( 120, 120, 120 );

    int connectivity = 8;
    int flags = connectivity + (newMaskVal << 8 ) + Imgproc.FLOODFILL_FIXED_RANGE + Imgproc.FLOODFILL_MASK_ONLY;

    Mat mask2 = Mat.zeros( mRgba.rows() + 2, mRgba.cols() + 2, CvType.CV_8UC1 );
    Rect rect = null;
    Imgproc.floodFill( mRgba, mask2, seed, newVal, rect, new Scalar( lo, lo, lo ), new Scalar( up, up, up), flags );

    // C++:
    // mask = mask2( new Range( 1, mask2.rows() - 1 ), new Range( 1, mask2.cols() - 1 ) );
    mask = mask2.submat(new Range( 1, mask2.rows() - 1 ), new Range( 1, mask2.cols() - 1 ));

    mGray = histAndBackproj();
    bpUpdated = true;

    return true;
}
private Mat histAndBackproj() {
    Mat hist = new Mat();
    int h_bins = 30;
    int s_bins = 32;

    // C++:
    //int histSize[] = { h_bins, s_bins };
    MatOfInt mHistSize = new MatOfInt(h_bins, s_bins);

    // C++:
    //float h_range[] = { 0, 179 };
    //float s_range[] = { 0, 255 };
    //const float* ranges[] = { h_range, s_range };
    //int channels[] = { 0, 1 };
    MatOfFloat mRanges = new MatOfFloat(0, 179, 0, 255);
    MatOfInt mChannels = new MatOfInt(0, 1);

    // C++:
    // calcHist( &hsv, 1, channels, mask, hist, 2, histSize, ranges, true, false );
    boolean accumulate = false;
    Imgproc.calcHist(Arrays.asList(mHSV), mChannels, mask, hist, mHistSize, mRanges, accumulate);

    // C++:
    // normalize( hist, hist, 0, 255, NORM_MINMAX, -1, Mat() );
    Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX, -1, new Mat());

    // C++:
    // calcBackProject( &hsv, 1, channels, hist, backproj, ranges, 1, true );
    Mat backproj = new Mat();
    Imgproc.calcBackProject(Arrays.asList(mHSV), mChannels, hist, backproj, mRanges, 1);

    return backproj;
}
/**
 * Method to scale screen coordinates to image coordinates,
 * as they have different resolutions.
 *
 * x - width; y - height;
 * Nexus 4: xMax = 1196; yMax = 768
 *
 * @param displayX
 * @param displayY
 * @return
 */
private Point getImageCoordinates(Mat image, float displayX, float displayY) {
    Display display = getWindowManager().getDefaultDisplay();
    android.graphics.Point outSize = new android.graphics.Point();
    display.getSize(outSize);

    float xScale = outSize.x / (float) image.width();
    float yScale = outSize.y / (float) image.height();

    return new Point(displayX / xScale, displayY / yScale);
}
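And since you are on plain Java rather than Android, here is a minimal, untested desktop sketch of the same histogram/back-projection steps. It assumes the OpenCV 2.4.x Java bindings (Highgui for image I/O) and placeholder file names object.png and scene.png that you would supply yourself:

    import java.util.Arrays;
    import org.opencv.core.*;
    import org.opencv.highgui.Highgui;
    import org.opencv.imgproc.Imgproc;

    public class BackProjectDemo {
        public static void main(String[] args) {
            System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

            // Load a small image of the object and the larger scene (imread gives BGR order).
            Mat object = Highgui.imread("object.png");
            Mat scene = Highgui.imread("scene.png");

            // Convert both to HSV, since the histogram is taken over hue/saturation.
            Mat objectHsv = new Mat();
            Mat sceneHsv = new Mat();
            Imgproc.cvtColor(object, objectHsv, Imgproc.COLOR_BGR2HSV);
            Imgproc.cvtColor(scene, sceneHsv, Imgproc.COLOR_BGR2HSV);

            // Histogram over channels 0 (hue) and 1 (saturation), then normalize.
            MatOfInt channels = new MatOfInt(0, 1);
            MatOfInt histSize = new MatOfInt(30, 32);
            MatOfFloat ranges = new MatOfFloat(0, 179, 0, 255);
            Mat hist = new Mat();
            Imgproc.calcHist(Arrays.asList(objectHsv), channels, new Mat(), hist, histSize, ranges);
            Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX, -1, new Mat());

            // Back-project the object histogram onto the scene.
            Mat backproj = new Mat();
            Imgproc.calcBackProject(Arrays.asList(sceneHsv), channels, hist, backproj, ranges, 1);
            Highgui.imwrite("backproj.png", backproj);
        }
    }

Bright pixels in backproj.png mark where the scene matches the hue/saturation distribution of the object image.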
Upvotes: 9