
iOS -- OpenCV -- An image comparison approach: marker recognition based on a template image


Reposts should credit the source: http://blog.csdn.net/wanggsx918/article/details/23833425
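The approach: use OpenCV template matching to locate a known marker in each of the two label photos, crop the matched (template-sized) region out of both, and then decide whether the labels match by comparing the grayscale histograms of the two crops with the Bhattacharyya distance. The full view-controller implementation follows.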

#import "UIImage+OpenCV.h"

#import "MyViewController.h"
// NOTE: the original angle-bracket import targets were lost in formatting;
// the headers below are an assumption of what this file needs.
#import <UIKit/UIKit.h>
#import <opencv2/opencv.hpp>
#import <opencv2/core/core_c.h>
#import <opencv2/imgproc/imgproc_c.h>

@interface MyViewController ()
@end

@implementation MyViewController

@synthesize imageView = _imageView;
@synthesize imageView1 = _imageView1;

- (void)viewDidLoad
{
    [super viewDidLoad];
    
    // Load the two label photos and convert them to IplImage.
    UIImage *mImage = [UIImage imageNamed:@"防偽標簽004.jpg"];
    IplImage *srcIpl = [self convertToIplImage:mImage];
    
    UIImage *mImage1 = [UIImage imageNamed:@"防偽標簽.jpg"];
    IplImage *srcIpl1 = [self convertToIplImage:mImage1];
    
    // Load the template (marker) image used to locate the region to compare.
    UIImage *tempImage = [UIImage imageNamed:@"temple005.jpg"];
    IplImage *iplTempImage = [self convertToIplImage:tempImage];
    
    BOOL tf = [self ComparePPKImage:srcIpl withAnotherImage:srcIpl1 withTempleImage:iplTempImage];
    if (tf) {
        printf("Match succeeded\n");
    } else {
        printf("Match failed\n");
    }
}

// Compare two label photos
-(BOOL)ComparePPKImage:(IplImage*)mIplImage withAnotherImage:(IplImage*)mIplImage1 withTempleImage:(IplImage*)mTempleImage
{
    // Locate the marker in the first image
    CvPoint minLoc = [self CompareTempleImage:mTempleImage withImage:mIplImage];
    if (minLoc.x == mIplImage->width || minLoc.y == mIplImage->height) {
        printf("Marker not found in the first image\n");
        return false;
    }
    // Locate the marker in the second image
    CvPoint minLoc1 = [self CompareTempleImage:mTempleImage withImage:mIplImage1];
    if (minLoc1.x == mIplImage1->width || minLoc1.y == mIplImage1->height) {
        printf("Marker not found in the second image\n");
        return false;
    }
    // Crop the matched, template-sized region out of both images
    IplImage *cropImage, *cropImage1;
    cropImage  = [self cropIplImage:mIplImage withStartPoint:minLoc withWidth:mTempleImage->width withHeight:mTempleImage->height];
    cropImage1 = [self cropIplImage:mIplImage1 withStartPoint:minLoc1 withWidth:mTempleImage->width withHeight:mTempleImage->height];
    self.imageView.image = [self convertToUIImage:cropImage];
    self.imageView1.image = [self convertToUIImage:cropImage1];
    // Bhattacharyya distance: 0 means identical histograms and values near 1
    // mean no overlap, so only near-identical crops pass this (empirical) threshold.
    double rst = [self CompareHist:cropImage withParam2:cropImage1];
    if (rst < 0.05) {
        return true;
    } else {
        return false;
    }
}

/// Marker recognition based on a template image: returns the top-left corner of
/// the best match, or (width, height) of the source as a "not found" sentinel.
-(CvPoint)CompareTempleImage:(IplImage*)templeIpl withImage:(IplImage*)mIplImage
{
    IplImage *src = mIplImage;
    IplImage *templat = templeIpl;
    IplImage *result;
    int srcW, srcH, templatW, templatH, resultH, resultW;
    srcW = src->width;
    srcH = src->height;
    templatW = templat->width;
    templatH = templat->height;
    // The result map holds one score per possible template placement.
    resultW = srcW - templatW + 1;
    resultH = srcH - templatH + 1;
    result = cvCreateImage(cvSize(resultW, resultH), IPL_DEPTH_32F, 1);
    cvMatchTemplate(src, templat, result, CV_TM_SQDIFF);
    double minValue, maxValue;
    CvPoint minLoc, maxLoc;
    // With CV_TM_SQDIFF the best placement is the global minimum of the score map.
    cvMinMaxLoc(result, &minValue, &maxValue, &minLoc, &maxLoc);
    if (minLoc.y + templatH > srcH || minLoc.x + templatW > srcW) {
        printf("Marker image not found\n");
        minLoc.x = srcW;
        minLoc.y = srcH;
    }
    cvReleaseImage(&result);
    return minLoc;
}
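For reference, the same marker-locating step written against OpenCV's C++ API is only a few lines. This is a minimal sketch, not part of the original code: locateMarker is an illustrative name, and it assumes the source and template are already available as cv::Mat objects of the same type (for example via cv::cvarrToMat).

cv::Point locateMarker(const cv::Mat &src, const cv::Mat &tmpl)
{
    // One score per possible template placement: (W-w+1) x (H-h+1), 32-bit float.
    cv::Mat result;
    cv::matchTemplate(src, tmpl, result, cv::TM_SQDIFF);

    double minVal, maxVal;
    cv::Point minLoc, maxLoc;
    // For TM_SQDIFF the best placement is the global minimum of the score map.
    cv::minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc);
    return minLoc;
}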


-(IplImage*)cropIplImage:(IplImage*)srcIpl withStartPoint:(CvPoint)mPoint withWidth:(int)width withHeight:(int)height
{
    // Set an ROI on the source, copy it into a new image, then clear the ROI.
    IplImage *cropImage;
    cvSetImageROI(srcIpl, cvRect(mPoint.x, mPoint.y, width, height));
    // cvGetSize honours the ROI here, so the copy is exactly width x height.
    cropImage = cvCreateImage(cvGetSize(srcIpl), IPL_DEPTH_8U, 3);
    cvCopy(srcIpl, cropImage);
    cvResetImageROI(srcIpl);
    return cropImage;
}
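With the C++ API the same crop is essentially a one-liner. A sketch, assuming src is an 8-bit, 3-channel cv::Mat and the rectangle lies fully inside it (cropPatch is an illustrative name):

cv::Mat cropPatch(const cv::Mat &src, cv::Point pt, int width, int height)
{
    // operator() selects the ROI without copying; clone() gives the patch its own buffer.
    return src(cv::Rect(pt.x, pt.y, width, height)).clone();
}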

// Histogram comparison for multi-channel (colour) images: convert both to
// grayscale, build 256-bin histograms, and return their Bhattacharyya distance.
-(double)CompareHist:(IplImage*)image1 withParam2:(IplImage*)image2
{
    int hist_size = 256;
    IplImage *gray_plane = cvCreateImage(cvGetSize(image1), 8, 1);
    cvCvtColor(image1, gray_plane, CV_BGR2GRAY);
    CvHistogram *gray_hist = cvCreateHist(1, &hist_size, CV_HIST_ARRAY);
    cvCalcHist(&gray_plane, gray_hist);
    
    IplImage *gray_plane2 = cvCreateImage(cvGetSize(image2), 8, 1);
    cvCvtColor(image2, gray_plane2, CV_BGR2GRAY);
    CvHistogram *gray_hist2 = cvCreateHist(1, &hist_size, CV_HIST_ARRAY);
    cvCalcHist(&gray_plane2, gray_hist2);
    
    double rst = cvCompareHist(gray_hist, gray_hist2, CV_COMP_BHATTACHARYYA);
    printf("Comparison result = %f\n", rst);
    
    cvReleaseHist(&gray_hist);
    cvReleaseHist(&gray_hist2);
    cvReleaseImage(&gray_plane);
    cvReleaseImage(&gray_plane2);
    return rst;
}
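The same grayscale-histogram comparison expressed with the C++ API, as a minimal sketch only (compareGrayHist is an illustrative name; the inputs are assumed to be 8-bit, 3-channel BGR cv::Mat):

double compareGrayHist(const cv::Mat &a, const cv::Mat &b)
{
    // 256-bin grayscale histograms over the full 8-bit range.
    int histSize = 256;
    float range[] = { 0.0f, 256.0f };
    const float *ranges[] = { range };
    int channels[] = { 0 };

    cv::Mat grayA, grayB, histA, histB;
    cv::cvtColor(a, grayA, cv::COLOR_BGR2GRAY);
    cv::cvtColor(b, grayB, cv::COLOR_BGR2GRAY);
    cv::calcHist(&grayA, 1, channels, cv::Mat(), histA, 1, &histSize, ranges);
    cv::calcHist(&grayB, 1, channels, cv::Mat(), histB, 1, &histSize, ranges);

    // 0 means identical distributions; values approach 1 as they diverge.
    // (CV_COMP_BHATTACHARYYA is named HISTCMP_BHATTACHARYYA in OpenCV 3 and later.)
    return cv::compareHist(histA, histB, CV_COMP_BHATTACHARYYA);
}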


// Histogram comparison for single-channel (grayscale) images
-(double)CompareHistSignle:(IplImage*)image1 withParam2:(IplImage*)image2
{
    int hist_size = 256;
    CvHistogram *gray_hist = cvCreateHist(1, &hist_size, CV_HIST_ARRAY);
    cvCalcHist(&image1, gray_hist);

    CvHistogram *gray_hist2 = cvCreateHist(1, &hist_size, CV_HIST_ARRAY);
    cvCalcHist(&image2, gray_hist2);
    
    return cvCompareHist(gray_hist, gray_hist2, CV_COMP_BHATTACHARYYA);
}


/// Convert a UIImage to an IplImage
-(IplImage*)convertToIplImage:(UIImage*)image
{
    CGImageRef imageRef = image.CGImage;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    IplImage *iplImage = cvCreateImage(cvSize(image.size.width, image.size.height), IPL_DEPTH_8U, 4);
    CGContextRef contextRef = CGBitmapContextCreate(iplImage->imageData, iplImage->width, iplImage->height, iplImage->depth, iplImage->widthStep, colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, image.size.width, image.size.height), imageRef);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    // The bitmap context above produced 4-channel RGBA data; drop the alpha
    // channel and reorder to BGR, which the rest of the code expects.
    IplImage *ret = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplImage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplImage);
    return ret;
}

/// Convert an IplImage back to a UIImage
-(UIImage*)convertToUIImage:(IplImage*)image
{
    cvCvtColor(image, image, CV_BGR2RGB);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
    CGImageRef imageRef = CGImageCreate(image->width, image->height, image->depth, image->depth * image->nChannels, image->widthStep, colorSpace, kCGImageAlphaNone | kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return ret;
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    self.imageView = nil;
    self.imageView1 = nil;
}


@end
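A practical note: because the file mixes Objective-C with the OpenCV C/C++ headers, it typically has to be compiled as Objective-C++ (give it a .mm extension or set its file type in Xcode), and the project needs to link opencv2.framework.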

