Reputation: 31
I want to compare two images taken with the iPhone camera in my project, and I am using OpenCV for that. Is there a better way to do it? It would be great if I could get the percentage of similarity.
I am using the following OpenCV code for the image comparison:
- (void)opencvImageCompare {
    NSMutableArray *valuesArray = [[NSMutableArray alloc] init];

    IplImage *img = [self CreateIplImageFromUIImage:imageView.image];
    // always check camera image
    if (img == 0) {
        printf("Cannot load camera img");
    }

    IplImage *res;
    CvPoint minloc, maxloc;
    double minval, maxval;
    double values;

    UIImage *imageTocompare = [UIImage imageNamed:@"MyImageName"];
    IplImage *imageTocompareIpl = [self CreateIplImageFromUIImage:imageTocompare];
    // always check server image
    if (imageTocompareIpl == 0) {
        printf("Cannot load serverIplImageArray image");
    }

    if (img->width - imageTocompareIpl->width <= 0 && img->height - imageTocompareIpl->height <= 0) {
        int balWidth = imageTocompareIpl->width - img->width;
        int balHeight = imageTocompareIpl->height - img->height;
        img->width = img->width + balWidth + 100;
        img->height = img->height + balHeight + 100;
    }

    CvSize size = cvSize(
        img->width - imageTocompareIpl->width + 1,
        img->height - imageTocompareIpl->height + 1
    );
    res = cvCreateImage(size, IPL_DEPTH_32F, 1);

    // CV_TM_SQDIFF  CV_TM_SQDIFF_NORMED
    // CV_TM_CCORR   CV_TM_CCORR_NORMED
    // CV_TM_CCOEFF  CV_TM_CCOEFF_NORMED
    cvMatchTemplate(img, imageTocompareIpl, res, CV_TM_CCOEFF);
    cvMinMaxLoc(res, &minval, &maxval, &minloc, &maxloc, 0);

    printf("\n value %f", maxval - minval);
    values = maxval - minval;

    NSString *valString = [NSString stringWithFormat:@"%f", values];
    [valuesArray addObject:valString];
    weedObject.values = [valString doubleValue];
    printf("\n------------------------------");

    cvReleaseImage(&imageTocompareIpl);
    cvReleaseImage(&res);
    cvReleaseImage(&img);
}
For the same image I get a non-zero result (14956...), and if I pass a different image it crashes.
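For reference, a minimal sketch of how a bounded score could be obtained instead: CV_TM_CCOEFF_NORMED returns values in [-1, 1], unlike the unnormalised CV_TM_CCOEFF used above. cvMatchTemplate also requires the template to be no larger than the source image, and enlarging img->width / img->height by hand does not allocate any extra pixel data, which is a likely cause of the crash. The method name below is illustrative, not part of the project.
// Hedged sketch: assumes `source` and `templ` are valid IplImages created the
// same way as above, and that `templ` fits inside `source` in both dimensions.
- (double)similarityPercentBetween:(IplImage *)source andTemplate:(IplImage *)templ {
    CvSize resSize = cvSize(source->width  - templ->width  + 1,
                            source->height - templ->height + 1);
    IplImage *res = cvCreateImage(resSize, IPL_DEPTH_32F, 1);

    // Normalised correlation coefficient: the best match score lies in [-1, 1].
    cvMatchTemplate(source, templ, res, CV_TM_CCOEFF_NORMED);

    double minval, maxval;
    CvPoint minloc, maxloc;
    cvMinMaxLoc(res, &minval, &maxval, &minloc, &maxloc, 0);
    cvReleaseImage(&res);

    // Clamp negative correlation to 0 and report the best match as a percentage.
    return MAX(maxval, 0.0) * 100.0;
}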
Upvotes: 3
Views: 3453
Reputation: 1040
Try this code - it compares the two images pixel by pixel and reports the result as a percentage (a faster single-pass variant is sketched after the code):
- (void)removeindicator:(UIImage *)image
{
    for (int i = 0; i < [imageArray count]; i++)
    {
        CGFloat width = 100.0f;
        CGFloat height = 100.0f;

        // Scale the stored image down to a small, fixed size.
        CGSize newSize1 = CGSizeMake(width, height); // whatever size
        UIGraphicsBeginImageContext(newSize1);
        [[UIImage imageNamed:[imageArray objectAtIndex:i]] drawInRect:CGRectMake(0, 0, newSize1.width, newSize1.height)];
        UIImage *newImage1 = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        // Scale the camera image down to the same size.
        UIImageView *imageview_camera = (UIImageView *)[self.view viewWithTag:-3];
        CGSize newSize2 = CGSizeMake(width, height); // whatever size
        UIGraphicsBeginImageContext(newSize2);
        [[imageview_camera image] drawInRect:CGRectMake(0, 0, newSize2.width, newSize2.height)];
        UIImage *newImage2 = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        float numMatches = 0.0f;
        float totalCompares = width * height;
        NSArray *img1RGB = nil;
        NSArray *img2RGB = nil;

        for (int yCoord = 0; yCoord < height; yCoord += 1)
        {
            for (int xCoord = 0; xCoord < width; xCoord += 1)
            {
                img1RGB = [self getRGBAsFromImage:newImage1 atX:xCoord andY:yCoord];
                img2RGB = [self getRGBAsFromImage:newImage2 atX:xCoord andY:yCoord];

                if ([[img1RGB objectAtIndex:0] floatValue] == [[img2RGB objectAtIndex:0] floatValue] &&
                    [[img1RGB objectAtIndex:1] floatValue] == [[img2RGB objectAtIndex:1] floatValue] &&
                    [[img1RGB objectAtIndex:2] floatValue] == [[img2RGB objectAtIndex:2] floatValue])
                {
                    // All three colour components match for this pixel.
                    numMatches++;
                }
            }
        }

        // Show the result as the percentage of matching pixels.
        CGFloat percentage_similar = (numMatches * 100) / totalCompares;
        NSString *str = nil;
        if (percentage_similar >= 10.0f)
        {
            str = [NSString stringWithFormat:@"%i%% Identical", (int)percentage_similar];
            UIAlertView *alertview = [[UIAlertView alloc] initWithTitle:@"i-App" message:[NSString stringWithFormat:@"%@ Images are same", str] delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
            [alertview show];
            break;
        }
        else
        {
            str = [NSString stringWithFormat:@"Result: %i%% Identical", (int)percentage_similar];
            UIAlertView *alertview = [[UIAlertView alloc] initWithTitle:@"i-App" message:[NSString stringWithFormat:@"%@ Images are not same", str] delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
            [alertview show];
        }
    }
}
- (NSArray *)getRGBAsFromImage:(UIImage *)image atX:(int)xx andY:(int)yy
{
    // First get the image into a data buffer.
    CGImageRef imageRef = [image CGImage];
    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = (unsigned char *)calloc(height * width * 4, sizeof(unsigned char));
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(rawData, width, height,
                                                 bitsPerComponent, bytesPerRow, colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);

    // rawData now contains the image data in the RGBA8888 pixel format.
    NSUInteger byteIndex = (bytesPerRow * yy) + xx * bytesPerPixel;
    CGFloat red   = rawData[byteIndex]     / 255.0;
    CGFloat green = rawData[byteIndex + 1] / 255.0;
    CGFloat blue  = rawData[byteIndex + 2] / 255.0;
    // CGFloat alpha = rawData[byteIndex + 3] / 255.0;
    free(rawData);

    return [NSArray arrayWithObjects:
            [NSNumber numberWithFloat:red],
            [NSNumber numberWithFloat:green],
            [NSNumber numberWithFloat:blue], nil];
}
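As a hedged aside, not part of the answer above: getRGBAsFromImage redraws the whole bitmap for every pixel it samples, so the nested loop above renders each 100x100 image 10,000 times. A sketch of the same comparison that draws each image into a buffer once and then walks both buffers in a single pass is below; the method name and the fixed RGBA layout are assumptions, not anything from the answer.
// Hypothetical helper: render both images once into RGBA buffers of the same
// size, then count the pixels whose R, G and B components all match.
- (CGFloat)percentageOfMatchingPixelsBetween:(UIImage *)imageA and:(UIImage *)imageB size:(CGSize)size {
    NSUInteger width = (NSUInteger)size.width;
    NSUInteger height = (NSUInteger)size.height;
    NSUInteger bytesPerRow = width * 4;
    unsigned char *bufferA = (unsigned char *)calloc(height * bytesPerRow, 1);
    unsigned char *bufferB = (unsigned char *)calloc(height * bytesPerRow, 1);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef contextA = CGBitmapContextCreate(bufferA, width, height, 8, bytesPerRow, colorSpace,
                                                  kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextRef contextB = CGBitmapContextCreate(bufferB, width, height, 8, bytesPerRow, colorSpace,
                                                  kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);

    // Draw each image once, scaled to the same size, into its buffer.
    CGContextDrawImage(contextA, CGRectMake(0, 0, width, height), imageA.CGImage);
    CGContextDrawImage(contextB, CGRectMake(0, 0, width, height), imageB.CGImage);
    CGContextRelease(contextA);
    CGContextRelease(contextB);

    NSUInteger matches = 0;
    for (NSUInteger i = 0; i < height * bytesPerRow; i += 4) {
        if (bufferA[i]     == bufferB[i] &&        // red
            bufferA[i + 1] == bufferB[i + 1] &&    // green
            bufferA[i + 2] == bufferB[i + 2]) {    // blue (alpha ignored)
            matches++;
        }
    }

    free(bufferA);
    free(bufferB);
    return (matches * 100.0f) / (width * height);
}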
Upvotes: 2
Reputation: 707
Try this code - it compares the images bit by bit, i.e. checks for 100% equality (a variant that compares the raw pixel data is sketched after it):
UIImage *img1 = // Some photo;
UIImage *img2 = // Some photo;
NSData *imgdata1 = UIImagePNGRepresentation(img1);
NSData *imgdata2 = UIImagePNGRepresentation(img2);
if ([imgdata1 isEqualToData:imgdata2])
{
    NSLog(@"Same Image");
}
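A hedged variation on the same idea, not from the answer above: comparing the underlying pixel data avoids re-encoding both images as PNG first. It assumes both images are backed by CGImages of the same size and pixel format; the helper name is illustrative.
// Hypothetical helper: compares the backing pixel buffers byte for byte.
static BOOL ImagesHaveIdenticalPixels(UIImage *img1, UIImage *img2) {
    CFDataRef data1 = CGDataProviderCopyData(CGImageGetDataProvider(img1.CGImage));
    CFDataRef data2 = CGDataProviderCopyData(CGImageGetDataProvider(img2.CGImage));
    BOOL identical = data1 && data2 && CFEqual(data1, data2);
    if (data1) CFRelease(data1);
    if (data2) CFRelease(data2);
    return identical;
}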
Upvotes: 2